[Binary artifact: tar archive of Zuul CI output. Members: var/home/core/zuul-output/ (directory), var/home/core/zuul-output/logs/ (directory), var/home/core/zuul-output/logs/kubelet.log.gz (gzip-compressed kubelet log, owner core:core). The compressed log contents are binary data and not recoverable as text.]
8 /]9{ș.uN&xXxn(w >9{_GʑD@?T+$7ds ?yu#>EC#$HӤXB+H3D$qpڢBŜ% Lp.dp-ރNE\݈+Ȭt>Icntaڀa2L .Za" ֊-+lk.iO )AC@f5J铝[jR8 .XJ@N 29fpc*|mS XpN{P*4`^O[ƬZ$dntJG\p-rsEvX )*A7q?l[˦h  acڦ -dʂ`8p *GN\/In+ jZ3XVx=|VʴK&a3rf8 "XʥJK0 z-@(81r" DFaIZ[*I+0MJڔ' ZQκSOQGܚ'0dR"d F4 "x Zx{hS>}-P;J|,qU<+ KZZI{>fKۆ*5ZA0vRo'rw i(P64\CP)V*!G\Ȝ^q8uXO|%e!:eN6_XJ©fbartyȌZa9˓(MyF^m%̢ávEZc ,|4zc B*#o\Ϣ\Ւ sPZܡ&ўxXW]8ҁ-KF'l eD|R7*~( (a'ˀ>䅂MEw:)Z"Jz!|(oD X&B`Z}E5`p2YN7޴qG)]faZ(soLc%^{{%GcM;9-x96*f"Ā!\GP7P nogZrSUvj|FՌ+;gR15谪n~&s;Uhߔ~v yKw>/né-:\CBmI5 0Vhzj7:j&z4$F-&\ih2k_ldhm'`ml;l[ X~ZtpB*|d Y%Л@*xJ3gO/56䓢jeh6rcxJt 6p,/@Fg}D2JS7Y r.٥uqC>γ:[|->p/]mp~S˅>fcALW_dZ> xzbxy:FzP4~T-Yuk ^z[g!(NG_p+c7WK Ktf CtR+e &6b)kr3Dq%{[9+mxK{ҍ<_Eo z}zW(Oэtm.nimw5=մ|tOӵk2mnXh:~|M㶭?^\q-X.j5wft|i?TsU}9VeR{{;&0Včr"pA@y^z-"%QW\ Ju]]*WԎKLWW"Ku0jxu *RWJKϕEH] "r%  k^]u%`H] L*ޠB]WWʵWWG]wH.Cukqvh9VXuj\.cYѯ~SMƳfsHsN+&sNFRzǿVp4l5]v_tVB嚩ޫף0dިܽQWZϮPzJ@O?>B ?b(TZ۫WK?eM"[piaK|P\3ZTӨ~ llvv"RPFlF2ʚMaC|5C4# BcC?ˏokY&ewovyj-*d,YNy^f%[h5hFJ#eLv9ER)`K7kQq|ʚg}|z#]-?غ뾩ݑn<-\+t=[J|WAmie2iA|LQ0٣/>(%ұ = JPsQ{V dz8&Y7ٽ$!{k2Ynsdޔ@ml}g)jic]M&%VbrǛplLE}r*EOYfeiOKJqpʉ TQP:րR4merTIl+mY#Zb+j3q+jGݜii?;B?<&d'CD<rףFqy7lgkd&oT8 LvI3Ė2^G/MU#b3q#/_52..6KCu6[%g⢕mqъG\Vٶ.ÚRRl;cCre2.cv%v듍=+J'%IWXwQFCi %zA9:mUV%P4R$ХE9rV&~(8l̤d<9mQnc `յr$wJi_>iIŁ,zaꓜiR*S0x Xs&Z&3%zC Aj #z[cJ ҬBPkeeg,6sbS 2jp4%K"KdPI;B&< y1uQQ&[+?6n*BmV,XRѲ IBIJQ!(H`&6X%#{T%z/ncc71/r1Jo^%x?/ˋD8=ʙ?+ҨcұyN|ֵ:Y^}l8Fw;"\9M$-Z:|k+:/?)@PD:6KDPR1!jȴՇ>Bk,1F e Y(@NiJ&p,JOYE[3iʹgMIh{%=َL띘҈1He]a.2Ůf&1R)#N-^Feu|ѫZ_}g Ȕ$ fޛ`P9SRIZE+C)ihBzgxo{袭Jk1(bWcI B,.u[+q;А S{ow ^w'qhB%v^ճv Q.6crPׄu)r4/9"4V'E.ƢE80vS*: K*2Cd MɑhEm@{gaQ"&SҪD#cj.o&qrĘ8\R4pvmj}6ng=mxG!wk=|֜fլ R$9`^ %>h+aBQy("nzaO)!GF0x+gX9Ѿy'Fxo~Д-ԗe pduU͖ _KK[`g~ƣgfQ/Nτ˦ADA{ֈT2(1NSdƸ jqbHĴ9;gmL&&/\qMٱ2 59g4${&fF"Up*#,KXPfXb1ulߔDя3Ufw_'Wx6nC?J%;]alcojKS{jĔ[ƫ LsT")pDkyG`Hk)\9ڽ{^E>eGmDE+j?9GGZGڌ G7{q53帪Notc8<G"ڭm\Xi7Y/LPs-ȅP+'#s&6 IS} Η_h3"ϯ<1Eal&<.B 2fWE36Z*4$AJcuAOꝑkB'QyLtVH̼]tJ*IhI2NYw}J+.أġcB&x\@X lėZxUSnqrF#)*G6cfGYkkH+k/7,PqV0e53/٘ ٭zzcg|~15I?s洼`]?z??V%?3Kiobd~N7?գۭvu*OKiׁNW}q5M/)FE*of;q?݂Qw%h g}rMki@|s{'YL?{& &<'O]Et\}s;W_BiZsƻۭ>ܞ*e"k;a t`o׵?Vr;}k^gqz%#}kk1^|^/M˽~};/j`»p ˜V.JL\LݷdL(/Dk;M=<;-̹;rwYw=ol^X|'M =h=>ғa2C)z j2{oWN_c_OǰeV!Z\v*U3d<;:h\ 'w MkaSIhDrt?4q?,.ců.̨o<<9MN'DkAtٕ@Σ͞yIadR%R^]\wvJkmm6vdovՓ䜛KlscՐ+ٺÕlJ5_i&NiV zX:(ͦj C8flk٭7߇)Oz9-VNו^l(k2529h+ /E1\6D癒ouKGlOS;J^youIWAjUz`QL9&gȑ!;mx Bc*ϳj3)e[YOYHm\ ozQS6A0(\"üjg{8W`3rW[qpu6[n}B?m)R!)~>$s9(9գw ADW#4uQ$He#'u"q˞8t[!N%@Yb:(RͦSL[E&W9")zj.jq}K>8`7ݷoφW2.B!EQ]9[rN1gDY1T]SQ>CC~(+ILԿL̿rO /|q&OYG#mq#,c ?@NN0@"S1`N 384eO-I%+y\V FbzrKn+d5'e)Yuw:!= [I%Vc:9YfǛJhNsc)ruSc&Kфҕ$ŵ95O9͈u߼xv9{|pꜭ!FSĜσQ޽y̒+"ѫ wha=儴ąi# C[ZYר!F!|6Gë91-CjGOmԶFӼ:(>O|~+9#[_X~VTL_6_~pُ_/}qg@g_g0˸)0ľ*mūF`sUռ~ɛNKYżdG)siWTT&U̬(?h!sz;gjqktݛ|z5z18k\ah#XC&t=jXIuo:>Jwaye>ǬÞNt1uHȂd͂A:X\rIAFw'Ij{>n db{Bn`흽/_p{ҍI5~աaj %\/jggu@o_O=u&yW-zGuR~O ;}}]ǫx>\F=@0d0T`%c"\&SD$Tz%Sءj-}bjaj桧&r7犆,@\uD`c|$.2LU~O唗Z)!|z8IE Fn0(시g}D//$ו/;d MUk6w+OL OD/ a0Mu@+e#=hno7RQg“$v6r"8|lN&`žDj x,uR45U?2EENw|BͿ-.gfѴƲ8eQ_EqYw<(H޴0X(A>8KoHH{!tqU}1"nsbz(K3A ΕFZrFBiךZ Vծ)>O]tVV=XѶ [b"5\G Y˴6DsQq yޘ ,ܒV#Ӯ5k뗟2.Y \_biIr*bt>>O!VqQ\idrs7hiaeD~ٵɜ'JuJçaW ,°@]BIkh)=TP#J7kx4`&װ#`=T2qPu\zf%{xwnQF+uRvP9<[^GUaP Ha7U1UŜJ\#}) ,AϭR^-8e5:|3vB!;)Hb{b$ǹcEn'}@')sq{)|( 74`-u^Z*^>?w.堠.ǫMq38 MG_իIqV&1F"kC*6O9Wes_wqV~6wbӤ#J|%*;>WW,oT J"B{bJ')yN\}ZG05zZ8Γ”ne}v4C=>nk}Urli4K0ҷNY:CdeٚZ,^вmSfbL㚝koEq;C'Z]~oqdɣ iF=M.n{C@O 9U{;<^Iuc?:Ƶ"?)/w݉|%jqEu+ƛ+1WU[&<N[oPLƎRޭ0~آK0€\ǪE$mQڣܯ\{ Vct ҫUqNLqZ\-Y%wU\'*P\(DW=F6C!"//+\wǵ؞r(ǿF;y>4o߭S)7L4>kawI nɂSf+[N_,0'nO0||i9K 8J]Ie@k@cw@flG '+B>rS pC2C _L0BiXd_ٛuv·C,-9tհ*׋K 򮞴CQ\e/ߦ&qtI͕W̽sp 
a򊾄@Zapq_pWS=KכbFU[MAE.z#o_j^z3Yne~cc&Պ%R2h*2\=ʧL^P V>1b$`FȣL.WZ2\[+I\ ٝLȷ7(}'ʱQ^q"Kb2ՏQ_N1*MJsyK` 6M=}]`LH0lI\:@d)4'ؿ/> *grթCmjܛJ[UimGnw>ҭr; 4![qr;,d0y\8Y+0 e[UȚQôk`7nkqMo8U]+V .#|֝ocUݢBXE.BZPy;c[-Ң)8IuKFY?̚RnXܴUUU=ͿoL뤽e "|Ө@Hgd$_|$x)בzWZU1QB&i01\F(N# 7Զ|Op2J-V3c 8#td)%!|pI DDA0HH2uE;&̸R (c&A0iJGǁ{I&yg-*(?ہ  (pzwɝZ A@3(!Cs"):a#i@ Rp.R!.S@Z;qQ!%7IY$Y Op)1eКQF"Hu0" 1|K>qv1>^2J[ 3M^B6!Jgqedg!EyltV kEK\`I QehJM3$)g)؁M\GhPv,ȸoEGѷUj> FRPeDqZbQ@.z=Ӝe5x!$ʷk &B5ZSHZ% z,HMEHPm KpnDȁr"_& ʣkU_PX-4Obrh[:xD.v (ѣRz`2GI3ЂrCU 4zr@/eUH'Cf%dp. Yq Eu+fwmĢ"V.kh=.d-Gm30IZi O<ʈ̢H0F@pKmZ%BQWf%Zs$G kBBl AqbY1Dp,X|/ѪOb q i'aŕGIJQ'#eȆ,= 1lE)5>Il2 LY=&'naPwGیzow]@!8qD C!CXEB J&z)R8ēޕ6q#ٿO.Xu̎"dY[U\Q&[F,r<3nU (K<";Z[ct賈#fjBaK-0&lJ&80)8ЙVNecX"mƣ Ef@8Չ`xAV%TTgԌQ"!\#)#4`ՠ5K@[/:^ ~"RwDbkuPAQǞh,J!neg]o#eY<櫌1 D%eB;t'A<6ô0L|ѤE]˺^^> QA+MNV6u ۴#j'Q%P;0J6"s Nz- ] &.H+􀁩p1$;`w-ȗXP0F4DL iY!^`rQR05*[xOyq `Q}Q@DSt+Sx$"3`G[I VgESY_~(~YQ"m dNI$ 黍.}wwg)>뒮;< ںkC,WXkp`Hc#. e^ s@ MހwK |Y]P1BQD e@ %tMކFL3yz5|< hw6Q$mFpZnۨ48p@j298c6Fu 39($`?Aj؁0"ep% ~` Üe>F/~kRXG'R% fcv7tX^GHYTg&0 ֨ `f)thTZ[fC "lw hߠtf&= H/6mI^0Չ!K5hLmE;Hc=[g0:'M};ALz,ԵEnB$0z`2o6:W  mفf(.֐Dk sѦr8RV,0j;p310hPݭ=7f6c8n ȀśC|C (5A=CPb.⌖TV/+^P@\0 wks !$bUlU7\|5 RLȎYY 4Ye 7Dv.P,9tR@[`5RY~UgW$<Neg,"B- P G~W]i;۬ōbt*V`r(ncU Z2'$Oes=۟~}JKhƩhP۶ F066+tzN?__֖8/ceuy9"Y]tRxwZ8 뵛γ ?V6uK<TEdx+` ?:?+XN}^H\}+c$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\WUpU格d:hngwUOMtⴞ[Эim}Po}~ae> Z_ sd|-ɘ?MձphfrvOco~T3\'!?aOq^-'rR~{G}uSjObWlj{#vsJ.H5CE<,3xS2WSR[je26*xۦJnBP>ec8iIMld&4٤&M6iIMld&4٤&M6iIMld&4٤&M6iIMld&4٤&M6iIMld&4٤&M6iIMld&4٤&MdyeH g{ J/@Xk|]`*TW$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\}T"w$ 暽9Z/ޓk\Y}" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$"ՃW ܭ`ӺS1m C[~NR0JՁ{u Wba}ׇU+IUî4vuh VRMj/ZkٗNS ÕA;n6N'gnUz`.l[TsٔW|&;>'_YI6Q3rZ3\nfq̟uO(FKa 7'Xnbo6]E{~plP5^f$%WַZmlP*^3T{}xQ# Jq-wmt lޟfgJXFqQvz[$ˀbUMP߅EO?Tz &TE=mޕW@żb\ފPT0֖"V{UEZK2sf|1Sz~re?j6V~èP`! o&ggtm_^.i?PKM'a~R_Cz϶=6qV }rWC4?خwuܧ|c%~ӽC]Eo^}~vtK6FOq]qg5Zg3E]억h}hﵧ۲x} ]Lf/[~UP?&X^=4Áx9qvq ɿJ{.npmUB:y}+?;Ǿ>;هÕ_ʫKB5=ifG52U,:FAmq]޻1\3z()|'?_?6l3S۾Ku WvLIEgV2sl)G.nqF9Kͦo֜/^诟s t']T:srW&VA{*@,uIC|7a:]TBJBw 70zkyeג㦞:w3N &hh VsڗQX+҇FB845oh2w)oywpj%*-A6Q΄_L\r9*-19Zr\ԂpirIɠM.0\nݡrs7CLD- wf M-9B=jTq_`s_z( vxgUnնIh.zݦOX_L1bTI+ i)y{'KӍqov[nb >x%{iP|K1g8r'g|>xgn3/gt=}/ \\CYdk|9qe7~>zמ̛Zd[毷8!IiR&2`s1F?B̴f`x݀x0sFǝǐV]R{T^$ގ@dz[2ٛgE=X3~ŏ>"礧OǗnVJSm[290TQ"Bލ)0P` A5̩(x[-,n[k٦br:[5xɾl@ړNA()qu-Tmws trwW z@$3Lp=i>HP8Ɵ9r}u>1kQhc`\bnkKId%Yf.K# 뷤_P_H:eh *˨$m n#KCځ?,)?=x{dx7cYNN*=DA~c~W#n `\|=Z.輴Ӓn)bI7F?{<' ,)y6 3jgoGǓE|fo7-8Rxٛq:a^>_n4~̑䴝-j jğ|܆jNV^Z}ӕ).rӻ$0#խ9x~ϖaF:[_ M ^* l/gy߸.fi&5dzn=$_0.u8cv7fE~/U+j)agۡ:4,LNmFp[Y`5gM, ݭ%صdd{]@]m~#Ծ`t:067ϟ-͇vw>|ĂAn]cY_R׻} 2 /F}aU2kp60\,NjN8rԬu┥huBUtqC|g_ĄE8p;- k 8(bC2N3}H3F#9bH#G굟~|Htwps8;;bJQd-5!̅.\IFzUaB*2zrfϺy-[$4KuN5_,7o3'ȊM|ivG=[8Ǎ[?=y~8|J>=|1?u]aﺩQ_4ax<1E|5㰊ggE?2UoK^Cu/zH#Ξ ngBúQ}Je-^3۶FÿBk=g{ȑWioep;d ,.(Bd'Ijt_x0u4a ֈ`AisHapM62fd.%Td._Ξ}{Ξ<*xm} V+g/p'NH?<œkѵZYËY]TW ZiSVRV.\B -}$$QD֤Ы"үUذʲß_>:CX*˪3?[ B>4t kWpI/n(J%II.(Ʌ ?lu0ofq(O/F5Ay E$KZ5@xN(/̏IY{g>dHgC¡D y)-mh 0V&E3ԮI'l`iZVgo^}n1~?|o[nyqG?o&,["X8 Vl֠Xvnd y!A*4ꐪx$dO-ioƣb\5y$-DPLt" d)\SARJi)X}cH)  ;Pd́K$Z ]%A{p_:ə Nޅ)D+o հ9rOqba&Bg^hkTb}\#|VKĹ{>apt8vUZZxuh)7mJ4HFK&56J:8+x#rmvN~CwPH'c>5@]zV>E{f\&Vr'3iA#/ cCG+|g|tݍow[ik,sI˽"5NNi. 
Ҿ܊E,T vRsE[ LL}&Y`T_`fdp]C'=6.bn59Ǩt+}w.G.輒 7㳔'A Y/a28ӄC =39" XضU$ lрJqr% Bp9!@H7()7VrE_׆sȹ'eDW| 6k W3 Q%%G9᠀%B*f0j': 8Iy.9eWsqn+> U3Y6G@' `2(\YF΢4TEp*yB $,cJ8H@I$d@P|$i޲2U#gO>|+!O#+51,D.lҗv " ΅jUZq>:IG=ټedTNhE &( +3h(2'>f(8~]ev9K:CCtQJɳ \# '}^+[qxzYg}aj['+32rpQYdɋbpZ .D`&nHkA`>b&NqQ=V`pGPPC}FY9X=_T#jҴMVņHC䉜MHP+gEͷwe1u vwi[+\_>KJJ~+ejQB&JG_p}Qa]fcy SpN|Rf$(hgIA^t,~X8wڬ0^#Iz6]4t(޶;3uZ:򔎎"z^/_bubYSzty:/XEg}A ^AWx0$摸pd@qpE'ԋðD-P?z0B,?v62VʑTsޛ t:y\FE暠k"D%E8f6roX_oS{4$a:&1LI)8*| gU LFKʜy*aKggWyLc}꾟Xxޗe[: LF3}<#N]];>IO7q` %t)cTSq烄@ l+ޅ^T$ hƀϠFG\%_96*D_zj[ P&cYaB"dN*a2z!1x|!S=Cħb:t#"'n:_LD|>v|4<_Ϻ6{p_`i5x{k0$LFA1Okl 1~z+IuAjQ:t<(t:z. Q@F=VT9WȹoYO'yvոz] 䎪[tVܡW5',fVT,xU+DaAH+ B`j $GڣؿdGڣ|R"Y&=ɓ2m@͉o*3>uuIR(IJf\VdRײw@(8,} k,YTƞȹ{n6$7?5^n֔{/ ?~րz7m}]n|!ALa//.Gy1SX^ !eN7*x#K t0Gxy8^NrtP*)L:$rYL餢KM\΁ҳT* /-bb #dREf)R$'s%WrϧWm"-aHӫU\vxY0Y U'[{tr=}5yS31{G׋Sx'ϭp/;I:mJs u=N `JECjοnֻtvH-j,vZzn|w =z^k{?-7>{~2M]{wyB>mk˵ښ/3v{Ԧl%OC{u'ͥcVX7S= :YGAP} `%o v` 3UOeעo*QDK}2c,X]&oUvI*d%#*{m}&.v<?|zc҂PR7siD|ɍ5*3& d>0900c8 :k^Ћ#@LbD&DRĄ#i oKqzԇ|,󱬣Xc6g6懞K1vR:gDM'e .#th;6!L) ,ZfCݤhC`%qAbcc 7s+X[2V#nXTvCe!Tp9˂kmH_e wOKrAqkPBRNW=|J%IDÖřtuwկ\=ɡ@pے)JVEip2FƐ %.쭐Nn7 䣧a(ƞyMTD DFɶcf.l5,hvb[܍ؾߋ &dl;v`'TT\1L$L"1£CnE,[,v ,d A0išٕHq$ ,# dC&>9wacԗNL7[uѶv;D㍱H[*l5AR |d[^JbpE{ԩl5zpRHтl$)(0js}0"@c2!%ͅFA9eDlDS ȥMu%k⢄vqQ.p6Y gP䈡MN6xj23،;k2!͈0n-]+ ȳ*Q,1&y HY[.T(ypLeFͽ EEmsrL&!pzf<RvYaӿ{1,Pn'\.&\/J4Β>`D $9̕-RKp(`" c2LpZiiL9H"˸@olggpC}3QB㑡 eAhd]$G]˟tƃ<o 7w<70AyǗoiljɝiӉޭƱŠ7!5P} UXME^߫ޯ`FpyPMZPLn\*Z-pEj6 wpbʬ8fYɸyv~Jpu?j3ս6 =jݩ Jo \r-n \o<\*^#\!c-+"m \r**t*TjW KbJ3aVG [ Ͷ¸l뀋CY`U?tM5ƏF.M;d_:l_ȹ$u-&&l{B.mB`ӅJӒ)a. 5>Bx#P^!\)f@lvEsB.mBWJvpJcퟔiMxzf!#{2``[50˶I,#l.>i⃰P؛BN|ج c+ͬge/|Wo߽۫yNsiupϻj-lJ !k;Q۸SYó1 SWqE/,AU:ѵ^' n Oȅ4$*/AҽUϪU/^pgi4az!?_֗A'PǸ~wH{y$Eſ?:R:V#ȰF'i@k9hH͌=S:VtiXC2\rŦVq33d,Iϑ k$(#w6K9]FHvX2-W.hWBAZ㝗YF+d2ǔJw\'Eҕ0D#JĊy3Q%Lo{w9^g38I=ZbBB)"D]%H!)EP/??=ǟ>}9>OҪ'Q %m2}0 ?>|}z jڛ6F4mz2૴kvTsk N!~m/ 0TI%C|$S׽XWlه0yEC_SvAYl Đ"/lާ'EV6qQ % %C!;m bTL&RjܮHsD0;C=S̫&zYo57v$TLBb 9P3F$qphn9KL\wv;l |Smk۞kk2;=+/sU%%᧹g{00VoQLr9"'iB!WmI.?V\]LI6 D_ S;tKش A} 2[sWѨƿ5+{Y!4 ^緋=a5^O]:inj #7|{^__KJq 1*Vii4'[q~֖^ tSz_ [k~sg9Uͼ.iϥPȰ3 DzǖP ⩶!:hsY7'7j=-"e\.XJoԎ"/Fc*|BYYuP BTN*BLBL'-cs͓KSAпv϶Eق [٩'174vA5q,7,bFm bLV,:( @.쫌C@Nr[ܩUI^yL7%}O+W5VVmQbe*̳N8mlU5G,bDx&;D6Cτ d6,2B5rVۅWQĭhH2dR4 ͽ;hS 8X%`^EY1f^Op-- z Fڝ՚Fݝll9h<((AgxTJJ  6/$m L'E/d,I m&&GH_+,g!)/Qa'myD6]Li"|Ⱥ‡- 4- a5TFFǣJɁsϢjIM(piwўx̔:/7E:ZuC,5:A/ea {;_D#䑗P(%| qByM;GC ÃljB ༛[LQKי'^ dZ:ÝL '  f1 nTw Wb͡7-DhܒqQw;ӲΰMjn5}nXMM/*9ٯf-Ba w>+.12^?^''^C|d0zW k0_-?/ô DmI%j:h+!Ah oXQg̿/0ܚiԔ}Ձħ9!zYa8$,v2ēKXJXN /^Rj$jZaUY#᩠V(XDPu bMK1knɎtd;u%a.L$(.!n:[ =ϓúGwV-;ΗO[ڡC羀\m y}1]b+]0+Čxm**c;ZPl%aU>D*/:YW5H]ơg2Ɂf?F pz₉=L&$[X5jՖwZƸ [W.6{w^ wo1aD&^;`Cwh %Kt=8k,84xhda@PJc61ޑ"#""$"⟅JhbF){G$Jj"LěcdŘbjFB[? *p:"ЂU&-X'S ;G,s <'oO)rhٹϯ痷wBˇÒ6 =p͜P){t !9Yz+K-ĚA%\ALYq V\3]5PcNI'"V^#XYlO0ƑIꝦMg}=f7u[õw_i}ZŧX:;ѳ|+RzG$MZ u쩓P7)э$Կ! 5ږ,=tn-wbtd& ޱa L7bg#+KFS}M[S֊FL P]A+kׯo 6 S) 5jaq$~Δ{޷$M.#{J3woi3%!K9NY.}hu=tmm PQͼ9G^sUEʤ0,k#:e혜}q i{'Vɉ cM8f!*̥5?V4\RuEu=R>[VߧtZ:fs J!Y#RA^ޗE !bO h :vB:ڜ'>֧5܀:gnotza=t>icp;č{OM& GDZ'`׉zp<]AQ 2U&ӿIt ;Oиi"+͟h߂P6 4=[ÖU <@B?`*u0 1B .y}!=|5w|pl>y˲ Dߓ >lPRAwFc8oѠCM3 !Md!|}Kp H &VyI&TU= #c0>}zQ닟&颲b+RP5阑U>YB76ֺDUˡ WbWE:k1R59gV98SZS|f瓭=4D)Gflwݐ^ȺVya赃ЧL+Z3m JowDJ!Do\߱nSݸa'TOꦤV,UW|K(m}iܲP4*.Ź`Rε\1Yk@Ú d7 - n_o՛&#WF ;T:u1B̞IVӟiD.<~I.Va diȠ`*YȤ82) m5VȍU* \&hD#b&@^LUUQ`- BlQ6Uj[N5' ]!?ź/9Nie?̿/n^iYu}Dq UgY<|Ң↯&޲ m{ۅzv}skgxP!whMwE[uj@hmԿⓈ!6= ,wr. pnoerwr wwawޟFqذѝC#C>?5? P:g;.G&;JzhaaϿ\Y%u#mwxD۸ksWKWEeP+Y{рĮF+;~+ ibȢ$X'r2*RgE&~E2Th? 
@?֛m*{uVW6UE%?]]/KT)G(Vy?ŮE%O@T xSskKFrJ jm}mڈLkMI\bMZ /9Vf >|i|H-/!旿aHrq-6!i([$iYr[SX_&(6J>HTW!nMp6x*xEG8Zĝͅ"Zq ( @F碜AmsEKvԘu;|h'F-8$j(Afgr={,G" 2z7&~3TgDSuv[%ϴNNvqV^)=M8w&>ۖX|ӣ 쒇hV\\QAG7% C*Xɱ;Tɿ:(a,\L0!q{ɿ՗0Cl$0+p:©"_AoNѲEyRQUw@ڂyN4rS:%ʋ{\ݨzsݤ] ;_ԽmWM1#N\{ LkATQY`aL8ZXhF%X"dk1a^C0hJ %eGѩWv v Nš-f 9$ܦv!H>@D=hmmi;t%jlwu˲m5٪x2:GbPvdQHW4ThxsMV3ZfyRBe.9QO8*A̐iD΃^$o< 'uzb4A &{ *%rCN̲}hMa%ڨls-r))LG`>/uZlCG>~ .~; .d'È7!]۫ނG[dJI{b`3PP -(q,Ϧt~3bk Ai8elqU;d\}VP7kTI-b'r}h@Gچ9 lb̮߳[fåOL,e)OIwϳ:]]\|? ?T=ϯ8y9"Öհw)T\mq~9m%gѲ.kj?{OF_)K`]2C@qg&vz80xȒFXTv$[ʱ+S*|$9}SV12~vs K`]JdK>cE8\?qLpXENzIGEN xY?| }cS@ QOxac!=smÁ5HuçHj~RVfXIm[N~8;3х2ܚ' Q( sBh^Q_a}1'򍅁/Hkz˺P.k uXߪ}U%q^$W{-{z{!IłlSd(V ?+}`J _ax05/] AQ"&Rqesm *Q( UkDrB3Ļv1w}8!nK>4?hԽi񢾬{&rAxN&K<&+Q KגzX@$U!yHj$iguq:/M<1'"B^%i]9H^w{R}waf` jֻmXnQ_ I_~n<ݰp02F?y_P&a*W|_F/~'o݄  iKB7*ͻ.Feu!G;{wioޥ)0$^}_p'ë <0{06c)C{rSlW{з[<^A(},V><2Liw,,G6{DlrUGiqS-vV *'e.mT}lΠ_xZ1)DIPWoW _w`iC& pJ:ݛ8Kn3ṏ|@Q b2A:X?i$TRQDA]QG`S3myd=ͫ-E< vu ֦)jL#y}\H˃$0Iİ#N8FV{ Ky> ӫѶn 7\?Gz|PJ-FaB <*(܎(+܂M:~:*s >MxE6SZI)Jř'"8'TGY.:zPauR9(±D@L8ICꨈ`*8q{M좯ݬpRf '6J@KW@ Btg?M1Eh Z_? vTͳDjge)}pqj%nTgvq|H2wP6yq*yӁp9_l0GuLt&~5Kw\wo~ Zȋ[g Ji{f@HGCp  pfa^|)^tˬU Ȏ$5ui)$[Ŀ7Ӂ,o[VE-a}ԗBrTp>nVH~QYy('U ˚ dǿJ?ո<\ *2h{+ܗ{k`3PS~HR[O{el'Yja'a'a'a7>dqH8qjBFLd3"+Ow2/z :9P_gt/cxQ\s%7N* E/=j'-b,;hU~'Yrn|ĄhNy`Ex\MѮxEHmT7{OճTQ !vV|gUW]aWtUqXgWψ] 쪡64]iT\f m$VXo]߶:ij㜠 .šT*RZ[K#3P{n2.Os`6; py2zNrwe,:u~?L)$GYS.ay?Fa6+W65 UNE?([hSO;kHE>ג)E 07^Ɵ*UOc)c#c썽)IQN)93Z'韛F;Kl; 2)np09sH[ 2\QC\X)Xrd-%{KZryv]aW \Jv]%h;=z>J0.+`ΰ.U®m;*{vٕsCx0GҭuW"]PYrZ x󳙚D0r),6G"R " ̄H)F;7\o107S)M$ $g8jZ.lYAu+s,=#BQZ|K13\KsBrZ~1cUׂQc魃vN0ao0-]嗤aeB[Kda1^O+mDF 16z´X(ƹ48 e9ORb&$Ie ǨU1kQRk^jF5pV2-JT׵n4tIϥL1|:Idt_䯽:Ӓmѣ;r$<#qP݋ZRbIR)#a脦3@Ed1(/Cs \fKn1:r:R'Rr 'T `vga,"BDmp28A(MLES*/m\G#+Q,)$ a6H`","uÝ=? X=/Ѵ !YB*H-R`;0DfH9肰֙d;+#}CRN^9+2qLj $ 8^U6*(;(.+G8_=Y?dC;S~9h&b2~^*$)Xsfc򈜇i6\8C 0Hqbu15J0M>ɬǷ.M. 
)Q0ZE4eLfUuc 8C` !ιb:f2 ݙL ^vB_ջ(lqkrTa1ZjkX;Ba0RV'UL8n*g QD0 {R=_ۆPmjZ܈>0ChH$ݫ59߆"j+frlX^NӔﷲޠ|:˳|x}&g'_ V`2l7CCc0^=4Ul isՂ2jsK| l-՟/ĥ u"z>`_okFꢀoA,D"Z)ʗ5qӶ?wCIX˰KES/ L;PK F@Xy8ғS@j[Wm0哮[W!"&1:IrB̝1:ǐt>mJl(GY gc"IiR8Gc8[ dC*n-őv9iJJpp`A ( e5BI`9I1δε\dV,{/X`[;ӳ\4 ŇE0*b yаR5As兦FSEgScB_ 53 ͞r[:6Qxu-w( ڀ-H>V+vVDY:t94ADepX}tfP4TJF9D$2 RjDK!C*JO,o2Ίt}^;D8X(R[I\d9%q,)bIbݓ@$GQJ5\(R`/IUPQ'ޫZZnI{;|ki$R9Yi L)HwNq0.bƱ]emrX\hj @M7iG)őju!*zPN@&,J=RE*=k E%>2(W m61M^C7[g5b ZB&&alɣ=)c:ZÐuL9:A&I:?⎠I^4ɫ&?BJH: alD&Kh=d<_yAM g~pnӲβXp1[uw;}Lr!y:Kҡ*|R*|y½>JJ,Ufo*0TbS\] KwGW]Uj| 﫨=:K)أXSrc{2{8XX{xvR0yp;o@ 1ڳ[kkn+GEFGĥ4TL&Uɮk=4qm*1C\-濧qHJ̛dH36A7K\A2,%ed1wclqo@u";I Y7k׏ eqVQX;fB9]pÍmƳ޽#:lUR6RS;!4>Hh,0ƭc0#?`Gwb 3\#ZamtEW2a9KpҢKEF "2j PR}$d4VЙ 12:|RMDN x2|sL-ߑǁVxҿ^60_Mwhqv9oXeE5?B Ɲ1N)(tDD#զf"$fTUru% UD*V;85x#zz<fy[W_?}SDs 1K Vk.ҤYt~L*H!pVI5klML %B83q.gTP:;%v)%1!dex*@W@$ave"Y`Fu.jGXY&Ɗt;K8*ുd-qe h7GZS781`(i&ΝRXAB0k@=/Pὓ]fB_!1H1PxQԀ-S, ي]V筪P9|^k\aćdg9b'/s IR4Ix@);NgP\+bttlum]*mVټmv 2m_ ʹ4k>G+jfH)}wn|LޔDԫw&8.1JQ|GiIw9L^YŶCZ/B<ҺgeMGZ erX DI;/oB' JuG ?拾GVWQ#,IXrA 8zTiR{ƍV `HR Ke \g@"b{C?hd]:&'G:o43{{,lN L~_>qS}4 _"5dz͋`}^RS:<\E*@ x!R]F=5HqfyTK#>"9}K#CVQcR !)Nz'zbT}A$TCUP YfBʦ۴)boQ.Fȃ}I.: AL@r#%ӯ&[kIVp܊KärG+'8˽?ç lX8JfUAt]Ry\Gdž]#tX"$S,,a(,*BpHZY:L*6) * dXx:D-H\d5.{gR2&΁μsxp6iklqK>:)oh|j{]~@K<7Yj{}> g_,{^‡w=>DJ SiLz67cWxKĠ %~hEjݎnwwOs8mwͶCjPf1[v;_z^i{>[^47C<|{:^diWF#>vj.lzhnM9?/JuۉϑAenڄ)+̭J|DS=8n.i^ԠHt6 kx;li~Q+ql]쏒>e7 h(HH\%fH6* 𶞙6IKQڔ}>"&F>vʟ_voȥ Zg"%`V q(K$:gZh0VU}f4 ېTFI/br"!\{0 85]LZSir;skҦg'kajlS^TMոD8vۆZ G)vFFoIdqr,Iђɱ )cZ"G 3n$FgڔF$m$LF-Et)bLk8WIms@jX-63k ->-\;͙},ȸY77Ӱ+4n0}D3[lKf,xɘlzp*F%W'C:  [PNKSkm(dEBb6&^C4Fە4qlsHkb0PcfGg;*d4}:DtV$LKX8fĸ/Xw.+m\8C&dȁXtɐL&H6p>fY ]58aKKIM"VEE,Δh%60 a1Ȕ\xhk) $GQuw;-rV7N $'bD.vGN0btC(ť+[j췈.=jOvq\:jvQvQ.vvq׌ԐMhOM(1̈́5 !#xOaVbݯM9r_)FC(nQbk$6N%X0)VFwe,ϕPsk3{;7>S\f6`.!4Ѵaj=a1x.bX|7V T74W2YjJrV< РdIGOl*cg~,us?PRJ>mqg>rB!%($@v 6'r~\0Lg*: Zg%GH\X-P&e2:&΁!BCM(T~dR?}-X5#[no߸/+~olZbP3zr7D"s(|p#RR,2O0G$dlg"Ɲa ZG#9C稝J4߳1ŠhByT[Ŋ'M,j&*iC"l3i,#dA]6j?vJNc`1Q"GL;}i1)9ouZlCG P>j3/಼BVK5xQEo +omeTD)ІbH6KC0Ń0x|E[91 L,0Y$G%Zo=٤$"I&i]uPw1̎-;N:( (Ks2IylԺH?t{O)݌W"IR\V6ד<]\gY]==O:~ Hy w5M{aMdKKd뢗]麗N5vvnsz44l딖.Kbp\h$]`Xs=+iW gqyKG&'+"_m}kGnWrWE*ls1Zy@CoypZO¼6(EFÇK|m2yJj[K7gn0tH}ܚ/g  w;Gͅk6B.7 ?]jپ>!v_\]4hG${닸ʚ=USs4! V,Lw3(,H(ka@qvYnhKMFr)O=駲*j[ hmw`v鏌G6̝V NN0 cBpЀBXc 22D1NF΅tfNbίa f8f}/hUl3Y|ö? 05o8}O7|1Fʯ_W_ܛVOMFQ ix Ou4kN4ui)^߾No?HS>͗2ËIsro'9;ܜכq:vbI2$iY~n2nn޵5dRKOǺ " ;;n<: ђNߓuH D Jvv!P*Oe{_ Qz!y-\_Goyq$5b+&h-h5zybr+&54)+znׄ/nh6CMDtIM'k~3? +G(JA[Ƒ#H 'J<(!H1l y'{~{nڃŞUb8Bh,uc $Ʊ>zsq'o>=_a,cR !" H-(bQ0R#hc`:P{]pxu߁0@ NEwyǩ\mbW)7hlj/Nd.l `%ah\UMEa]l_ث~ s{-G{l睙|J-\p1_?`)N#KW4r7O!]j۞yOy%LA.*@YI䬲7șG:${mAջ>H>»£J;µez3V\}oYOlZ^pm `+h{Mw*I^?lk+vvګE II|aEmugW`&pcq=՟;q83ťgqk,}oqF͠G|uhrxAuhjO]=JzU몫ެ6YzL+ S3*wF]%j껺*EK]Toe͛bRm83jrrwyggܯ7i:}z JWyuUZLѷY™!,y5u^MYa`.a+o!^p `4J#k4ybӞT|TG:ޖBõ)ko@;߿9HA]% 3Q*yo6Os3>+D*1^aMIeȣy4ògX ˞a3,W#vixeQv2,{eϰ=ògX ˞a3,7w=NͰ=ògX ˞2reϰ=+ ˞aٳȰ=ògX ˞5Oeϰ=ògX ˞a3,{eϰ=ògX ˞a3,{eϰ=ògX ˞a3,{eϰ=ògX ˞a3,{|Ȱ=ògX ˞a3,{eϰ=7f 5=7Ȱ=òc=ògX ˞a3,{eϰ=]9hl R0:x]\5&_ _+~QIVxVz:5)^{ZOjTM0ӟ>\,m.?BցV+WsíUHXg-]/Zt&S[[>ŠDЂXt&5%Omaq<*Q^ha'-C`-veX $a0Q)t$!  
uIغI?˚W"ZRCɨ 63ׄ;s}[\vŷ,e"l߼z;i 3Z{Ѧ3 A8 ӊc(8#rE؄ I{&Qc(,<a8z<89kn*GiF>S Q2p%VIuxfar[-!sQ>-\,71V {xPu|ESHW$,u4Дc5FE`ޛd\|YBmص,#z"%1I HaQN+!ʓH&qH8a|g#^ZXai”rIDѳ"L( w3U:bXf\۟4#HEک'0D:04FpΕ2bW1,H &EVRe$ i "AH8V9q:أlUQ&,vQb1]^L8_l<#;s~g \T0`.7}}$\`M0-F_\UaJlqMI&ƅSf2k!q7^R]P0ZFESq+,& !σ K*}3 {~?l@0`YC(XK ՚g_Ä IyPui6OI҃/L1 _Ẹ4"if fkMf*T+FRAо[.}Wcf6\8Rp- L~*nSswQߞV*][}]ux9[B"Έ0x>8=.$Ն\%E><=>3䞇ChH#e#]srEfpTѸfhx~Nd>b0@SGm_rը$GHe4vW_HUxKU rg?'o߼{~~x};y &'޽_EO&ᷧS3Иw54M[chB|fWu+ƽ+>Tu_̶VD◫o`Ruu{~xI6s Qq՛(/|6v#*r*]J(!6s:Bd׫lG; k [H{ꅁ }b)VR8+U;꼲c֡'9\L\)2XSo w!` 9)Tq{wcW'; OH iB63lO}-wKZ%~U~mH uZnO^IĜ,}( tRCZ\>;yܟCHirƽqZT !N Gp- !u )֝V:d `faV@i6(҄H caFHskw+rvS0; b]+u?!ߙLc(>ڇQSȃ\:8+/45*<3b eagv&=g'⇭XθCQeM`lZ" DGҞ"&#%w*gEC%Dhh HD"# FT8G"h ATD!Yg)gډe aԄ֓`/ h 87"T 'Y9k m{ iekd{%  zG ͅqgD6dzv>o)j>7NX{yRJ(+5Ckc.&{&z&{&{ с@YcwDko0&u='^E :bV&As%Y #z!UВH(^jʈhA #(H8Hw9kv[J]iS =; g!kW/[mե/ϻp4Wӳ=F۹/>A#FJÁl0eȀ DgEA)m$EXHK9TA9KR?;h4N̮|1E'IJz %!D0DaUJM2wmmvƥh}yKRybҊu|R%Q5Uer6f?|ht` a̟:(?ND)9E"_%:6I)Mx9I//t`]Ӱَ1?,{?po I;e CӸX =NB`j <,$ѩ8ڑVvC-AKȵ:V4))Ǒyt=ӓ{GaO4#./qa/&kde?|gVea}ݵoVzCRR`+:V=B͇`͇|hl>L}Ծ.Hr2U $@A[ CMDQ1l{8RVOƳse]'cȷ3*lJxݵrzn*mkXw=;F/Q;jFPlPl/Ll'A-hy}WPRc)1 _d^CldTLIdEY 2uQ!x܏0ԇ YxdcmuT RQDȉi?Y^S9IZ6h=<ڮz_gqX6_YqVe~* AZ /d%FYXrW)S)66 ¾2^uVA=a[1ڗѢ ~fM{oɩκ":u;ӱ]:%%E`nj}Eٗ{i8XК/[n, `30}:>Лȴ^i)$Oƴn_\-;0+YX)"K.:Q`ㅱ4(y|ѳZ=Gĕt2 %Ô2PbZy\0 BZ,hnhDF9l@12HGD,(kE5ti&CY#[mΉws̫=~8{ ͈^tH}^h-\Vi6_qрP".(; ;{^ 'hҩ eΙeΙAIX &S1k،6ԽOHWN *B)Q倘 yY`CtOeemߛoXEܽj}׷>) QLTtoK告ӶaQ.|vڄeH/Ǻc]օ&;tVZ ek^V&FY#] *(LBKsڈE@Lks uHY"RrH]%x`lxuM/s`v@,g}?-Yh.\|wuDS{|0m-SWݛ׏YIj3j>|˫E?f_;Xȝkw>UoY rQ`׫ջ9"쾛|eWv+m=|].?#o\[jiݝz7CtrVKn+P螁WjƬw$ުzKM}|Xo=*sՅF|s4MGNt_ ]e5B ԁҾs.XIl貿>\vqُnY' En9f"[(.!,Ⱦ'kgL].'Gn?/_·;^.?$FEoۥ4h`Η%mZ$)g5QF{8/™ [@q1c!ͿU1C`#J#L2bcCm&~C$.a]Ϊ(I-ϵFG(;/ྌ!־ٶoJav/oϵyzoA-/ir7䛉yOY4/L軵pyBTUg4sREooG4R@hoxVec-EU %Ӎ2'g|@-%'Vh:y;ev׫u|Skei+Cigk3;x艹4)<#V&q6T\K:URC䁏^dA<\=I`/ WOW˗I/Y>MJ;mF#\Z3**s*sC+•rZyFpUל \UiW,%*1++m`o$nh:[r'g4y#Ƕϯ]b~T0֌0'ח奉3I)3')YdM2py^\L^'s?Kdgv<ۼL8󃹙\֎ӼdԗVa(uG É9wcVseR 3“Ypq2xqu[U@]*=ze?4|nu_KZ`JQ.`p]QXO֨`3~Zn{U'*Be|Vj֡L|*RSE/s1w sjZ2jI4vcU^,jVh:gH|MNb@[ (9r@]e/̶8J&UL! 
2U $hBp )FA(*Nij}ژ()g"ogUb;'%h&%k6Zwu9_7v@:X%r>8k@Òha٘~6fCYIɫ(|iB6|Dem`ʙ_JN8؀l~l[{խR]Cl۪Evbb'{B8PvLH!c;Gb Bb}*~Mn.&gR&"~ i])s0qQtD{M$e9Ა^ſ\2Nb!iSV>PѬman>V6nCؑt!Ou Hy6Yuuɼ!微&(ȟ}o{gfPlpj ԇlOr~7Wsjtzݸd-կ |n[# mO/#z!;Z;.Ug]ZC,zLj-X"tՉ/b=b_/|=k3kߖ]lH("׾lw)e()1ja?AHkБmM(v>'Q-ֹҔ:0*ebQ!XQluV\=DENmΉws̫bW|?]{ ͈^^{"8R>FJ4ͩO_jXEC\T*SK}z)I'ʜ[5lQ鴙sf@sQX`5X0) %m{.hr@Lʌ, d!h9~v6j[eaq60J8WrZ/lߒbŴ(L{teoT8m;+vuiogMX&J^K; ziA/Ɏ :]3H- $#] *(LBK`Q$(ڜHRdf$@-R$} i0rgfjZ潈AǬ$xz >tFwBm~?>UoY rQ`׫ջ9ٻ6$GGK@{F}~TK\S$CRr#U)Clj-rSSU]z,lG+~ؾ4Omy?~ɛ=f^&іgG>7<,|i|p=S;6db۰=*±U;`g,s)8%*2Y|epYNJ2֠a&ڬ-]'X)_%i(hUEHT3zU$[#gK*8P]h[օӅGՅkosf72>=ӻi >?}ۑ5A2AGrJIT (SAGb%.CҴH2`2!EQRAC2% ەK9bfض"gaZ[NkwЇ'uLuvN蜴$LKH8ȸ ]'\V@,CDC&!#\FM#(:C*H-٭kP_GM b<]uӈ[kZ' a KL<$"(E 4J˫=v:1@HN2虐B57m X[#gFx"r|qɁzQvb^6[=1R&@c3Rl,YF#8F%ӋЋ[Sч0}xbxx U9n _&QhUG~\o /mqM n^MBJ;oq]8ׅJյ[\+nǯ@lXEc,xr *UcUh$Nx⹋g+n4c2#a9@5d/3:Fd,b]V̸e;ZC&g|,}BEx-T YxfAJ|<۽M\V`O U`߹9v`(BGGᅥ#̪ e# RJs& }BRguG)Q=W \,Y.!B̴sƘ%2镎“䙵k81%0Y4+w)!bV^ \(E}lyk6H'NF'VPhxZimnٰTJ/~6mz8DR9G֖l B A%MhJqgR·Nx[zJMs75hؐv:dGN<YbP#mo^߀\oWj.%~FSq5k&ШdvI&@\*|})zrPB~h08vghwuk>3Z!Х4/s.&2Qk||4Lgl"^'֛)օXǠ[qk?9_ 28|{tk GLQ Ϙ̵<6- CG(C1` 7*0Q9CtF8F(0S bxFG:ə xN+T$~'*2wu,i&;GZ+!N<w77)vJ|ڣLll4Gi.N4]nzh'nZ6d)T4dx@&ɵJi`,`Hm"D)g$Jdu#_ n`:Μf.4 ٨E4% hyfJwF"O<].pGOAhe<yHnIXirFkCymP\i7r&nnۖK힮`|&ޚ/g -b bo滭w)k5-ElJf?O\l3|Wtl3lCl-Hv9 =TbjΔ&lFLyϱE`ABٓ 7.p>*߫RbDz+b[8iuI.ꕾatKE>o鄿_|z8F z=) :aLETlϩX2oYO1zY#Q cB {ʹvA`7_x|EbO+zWɡNJ2LA$.do1jEKomrF@f$RIxfg|pj.nbM~Ք]KpwqL]NX'&.gx۞}?D|pTh2^9V~Z6y7Fq47|\4񦔈7ߏ_$yη SOiz~13 !|7Ƌ7?4[_ޕ[L}.wvwȧ?tkL"臗XO^巟'ToopT(R<O\(Ss[T>vr[fJ$4^Ukv EZ/ }v6s}tI1w6LR1cQ}5vhLOxY1h)^сr`=œy]цSst_xL7l.a{?bͲ"TŢ`O4*e&2\ĴP:;y+@yhקSl)⡅o@qXoLY4ҤJ;6Vz_X .\B cHI2QDADg;׊+e5TV665-765Wh.%7YmE;y>#ABGV%Fi$kAdҳ$V3N`E;Vꚿ+oުu8Uكqva®qcׅi% 7RjA;%WXY3W!W(یSENG+ɍ(yJe%IŁX6i#.t` (EI0FBJ%K@.=~sCTE ${<'xC7<^ZXaRr͉ła9A$8=ӈ2b[b \>6OJ@ϳ5#HA2,_r`9SB K$2:R69+҇v`)\`PY!rK HI:|֭1%97>2t#`t].3)rY#O2x{&{Moθ%0PS4$&h)g~:0;H ^U͹Oz ʑaȜDf,>Ns~5-H?\3,q8S~\{CX{?]okEҬwo)sz4i,r,5.[s[7W\=5{H_kwݏ3O|3j]5! 
+\<_^V]q zS.w W6^ mFWaֆ2Suu^r[q|jEuGɓ U!i胐5Զ" H+yI\AZQz ʶgFkȐ9QSLP%-$;ԕ u>LJjj+*$9z*9݅QMsRR~!;kRO qMUs 01Lɭ=̬mx%m4KS15etibJ3K*dz^Z&S5lUD PJE+?auT $BtC 5\pz紖)r52`}H(*THM֕u 5͹|ٜc1x469 p賉9 Pc ݢ}4X5Y:6S4uBkP!xk]Ik}Zҿs{#LUdzg&*CH2ȧk1؆Fɤ,QE6ن ^Y% eQHjȉH OΡj 7>%9 Yorg㛵P9sH湝LAF>d*Ul4hc| A(B{ǣک}- =8=pk $E5U]-+' \@ u4lXNԊg4ILvku\m4l<0hVT4x1ڔh(52UZ,M|%eMm xa:Y|'T!bdl(UShkP& :xߌi0 >ӂ*b,&r=w6=UxzSW11J*DjikI^T 4A-e+{=2.}K譅ܷw{$:{N蒲:KWv HM 6ք=dS)izA4^\]\EI;axk./ߝ5=`파!J e^U&ҕ|%;R-u>`r2DA;ˡ)}Iry!&!+@ wZI^C5QM&>Ӕ n{>_ޤM]-C*xߤEqI_$:.M/|Wկbo۱rߜ}_^~4C0͕l U?kCAm]a&2V^ClR dRI6_|b4UNi~Nqyf_|_f~ph˗<=ԡW/8&1]8L~x?-{w~uk#߿aj|XS0+7 ]RhYBCT$t.>smU>gkloֆ }К$'E*cl!%_o6#8}w3ѲaMr,c\&|[kdW狋|Oų~(V_*^A~:pqA@|)_T5m2V7ɫRtح Kkjl40+ȳze#Ǒգ1\mU Z4o(rsB!(k^W8MLD'$0ԺҦQʢQ"W8z5GxIlw+ <W>_ߚ>r} ۽m@~UyłƬYڬV>*WV}Mк`վ5M҄zY&RGe0FV%?[_ǒΆEEaJVs0y`b]gZk2W5F@5 i p:hye-+X؝n)*^г~_yX;dGߏe!eڶQgSo Sgsz[7SO<{u.efy))H-gfNAXumt#t#tC\ID$dx`MTu%E]IتM$o.U+Ni/>AϋU }t.7kʆlYl^f$~yWUBVU^HM!6Azu|6KC "yO_9 0f(M|&"VJ+W72BE]r rymp]kګՂ>EhW̞0%:m>=Ap_1z,e.1zJ ч]Fj X3cD=jBjg+:s;X3+'a WDqZʷ䪷))FrIS"vh#mY]GE|8߫b5c=3?OJ& Եe-AjScZOIl6_g^7׏;~>~xzuFM] V]~XI GOnѺ3lR$7\Y$_M"rͩ h*n>j[?Wm-=5~[{?OK秉1I%g[s87pkWY?_oYñB`?k%Qtq|E~v"]|lOZ_=mgnW/<՞SB)gZ78fNraMnjTRTSL55jz>#\`#\6b+Vab̸ BamF"Rlpr+Vz"JW}Nb^d+"\ZPc40j|uoEmcQ|ĿywOw`U>,W\VkqvD5  X#,+*\Z*yVfZZ;Ϭ ˅lYctqFuł3(r=UDf0[WSĕ H!\\-d.bjKRk^SW$|pr}6"(q*ajzp{V= c$X zeZ7uM%,^: f\ݷ1d+T.bhƎ+R)q5A\)t[ X\pjэWq5A\5(M)}' Z]MN,x:ɕ) LZƎiVigrF|%[<`hO׋i٣ߤkI7xM5ء0?+ފc !9?7;kIyP-o1gYFJ]-R Ī#5.]Tc51<wbs}ڮ/q(RKtF{UJWW[rZ#N6ˣa,OOMZ8v\$kXpF"Bd Z5zUq5E\2['i̹zΥB<Մs:\z&Kϵo|Kk㐰[s9=ޓ}XxpÛjˌ㼻*λJ30Tӳvߪ(pEasE\njq*q5A\)XWb6֕BMW܌ ,ubԉr0\/ 3TzY w19U@m>nE@jC[ r+l7Ln0 3 |pErϞpj;X33&++< X3*X-q*vьG4 `+A6bƎ+V팫 >3}W,e ZuEq5E\D9@`c \\'r*}B'VE_̞UoRN `fT䪡Y֘N*L\WzАX0lp,r7v\Jcg\MW 4*#\`%m6br}蠶q*͸ /]+qY.\pEjPgwS)qK\zNtjQgVbqfYK-sl2# ^I'ǂOM.\<9R jzҨٓ'gt*(-d+}."8Թ&T9 a"W|+;njq*Q̸4EW,W\pj;X)dNjC.;UsƎ+Rctph2 vhE4VOJ砩WvϪ vOw܍nr+?J+Gn;θo/2U{ZW$K X-Ujq5A\) pł(sE;Hjqjގulw'J;/JAӥ}NN0X/L>V%&L?pLJ=;4z'eiҖyɸI jt *jIjC$?|dsqZ}d+;!Ԃ$2 *q5\b"lpr]6}bU*q5A\9MVK$J Xf͋բ;H38E\yO$Flpre6@Jmg\MW`N v"xm XPg tS^1*w\= \'^ܡ7vSȬ+Wn}^Vg+eu6\Z%q*θ @<Ɉ[ Xn>b(ǎ+J9jNq&hRB!'|t߹CѹӀƘ Sajسtm#Oo˅[8sȳ:tYogZ{H_b;d=n,vXhasxȡɡvC=zCJT!uVlJDs{ȷ}^3:9Fh=N5r.\Gs:ʠ{9=#`? O' _'(++~FtZUf 5UGY]Dr.b>`f v;]|[%UzҜ]4*5hw}9]u^0]D |J9Z ( oIt_xRf;⧓ժL#"6lZ9fw^~ ? 
[S/0}@mYï-]J7wM:c :dXj&UD>ڿ@J/ii8.jyY_ X:dw0!Z z7Wޟ1-#y<yQ~|}Vg,͑gPOmD@/ԷG||Kb |)Ԩ7VD#JҦ@VTeVfkQ,͑߾ )im-Uल8DQ'i.f$^f!)фŠŦ] Y dn\rQ$l,` &`ҙB6P=VRQ]J u;a*I%CLxeH ," XVTTPtB[Z y9mxAY]7_DwHX56ҧoA2Ɣ'-s#ȭ&t9E"uR6 SKȆjH,ֺ8R`>SISin+CQ6f-a)-% ȡ2" + ջZ(Z0*b*18o420O-`&7SXI2} 6M(pe 8d, |DEx*dM'(Mrʴ] Ge:Ϊ"(X_n:5(!ȮD@$ 6CA5ad\Ѧ!,xP C9%X !@YP&9ih$mfLV"˄:}n*oMI9$(X,xN`&BC\9N ufS.K@` Ne ` I&- pY3TLg2B@Qā.(ʨq` M:$; JQвA7/uT* qJ S ݕM( ?J]FkD5=( E--id^>թb~)QNKG%AࠅjJUV+ "S۝@VAjZ |[4q\Ƞ֙@~r@CńE~ŬQ@q|cL:C:IFD}O&X^,ee&FjAuPqLJ`"K*$]45-Uc똂[;&:!.%) >@HM&jZ!T^eh&Պ.%5G0Ara^F=ԭ}Qx3$nCe6s*Yȩ ՏDE3rmE7Yr&e5D v "} ӭ?^SBGUD|ҷ{%}+tm e,"zԥt13>Ť}(T`w}|PGt3 } $$BLAS{XRr*am.FttBtX Lܠ|)B>JK]`&Ռ&Xj5YҵD= te#Kqѯk7F!t&% I,ds  "XtDd*jQ ENeQiY{ZhΣDjAaKDogNx10-'Ez7$%xKTlЦĐ 9 ]ndh-87h4)Qr3*TAZ`АT UڀDzEPBz{t}V 6B_߀uEXIBi'W Wh#G Jyˈe=P(ۢ#TɠHԜEb,RuJ` L@ Qti#ʅATz hNLUY327 WjA5iՃ*U|R~Ϥ:IAL\сhaB5gm}ZZ-Q秽[i/~c@TBmEwҘUYtڠ@8XA¤ Vʣ-.h-L4EhQBb)A( G{|t '=fMڃЉCXm(TLqJ*kE!8Xg7vҘ%Upԇ X9eT$hVCM.8I8N]hEWrD4e.?"t-+V}OAA(7B7{qzn+˗fIҥeX2tWn>BeSK E-1n jP޿`X{qg[I`X|u rmq;zvHןkYz9ټ;ܯfw?9Lo6#ܡ_y 9o+\_mⲟi}⤽9NjǷnщal {W7qu1c=,=_f?k/Ic%I*{VN vr78?{'P͋tN v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'ЋuAEfpC @Pb'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v\'B=sromq$={'PjN5@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N2F7''P) 02}ݳw%yvD''v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b';^zԜ;YjG=l76}εE^_m⠝ge\rfn>%Ua6%%\'qƸPhʛt@ es¸>AVA \f}qΔ^]k(C>7o.Nů^qtfؑ#EKm 5!RKPJZ_.?:^?]"lciKǼE?A?7jjP>mY mn'a>cZ,~DI>#@Î?5owrptW쒡_f_2$27$g@?,xE-J_|yovzp N*U/NO6=;n H;̀V&o,^woc!KncVBmVGݼ:Vw\(զʴ]ݛgi, RiMRipKRihW? exfsY}K(5A3+φ/} Z (LW/7ѕlJy+BW@\xtwitJn/{#mhSnz˾1{?\I<1/E`f ̜s0X Q$$߷غXuLYIn]dWiJIe:x!pGEqFɴiypqG?*z)p /KI\-.g*eK^ڢ\b=w^uˬ}Wc7~ n@@m~or$p6O#-#GoyLEA(Em~f;P?Sw/;CP Z?I`/g,⢺n"X~* rW$p1+/&\UU҈]}0' l+JKqW$U9O^ߣBfl^"wE₹tEjq芤Dm(ݕLJyA*Zy)])EGߏ{zq%"iąIOj/pWsWz-H`8yEj.]T۵_ `]} 8 UPr+m{8zO|6K0ٶ6Q8ܤxOM7JrWhq?4n{}.j xM'qy)R7)=f9,f+fM|{ )xO2W旚6<@l)~֟=%Mӵ"׊ J&q fm(l@xb:ck+ܾǛjQ2]WUHfzm_9YrR#oYXn|Л\ ZǫoT?6Չ{|Rq[_t!zB_w~k;[)[q`F()HU;CYb"Ι+Ȳd\+i-b<0KIV***N ]G.M7Ɉn8׾oPr&OWmYf7ip5veho$~wW?}7| ˯,K\N6jS*"kioG>i<^כF5iX?эy˙ӬmbeMR.&7Nݨ˵~nV~ 'hܻOX=+ZA-;55g} bW̪IuUK%U"f8ٸptf$QQg̾0 P 7d͔S,2_?>  /yn#q BS6XI%a5Bea/B7f#诖|ֿS:z*Y{K,z͐/3\Zv.#x 8"QKpҢ+uqF׵BF:MJE:1ǎ9!sdTkOKS@E ^iTdh-,A@WŝkJZe =rRyo9@8k&s#w@tVy]VƝٓ9N}{` Arwm;,xu.doZx)h> @-;w=}HOtvoo!6iRkn]Bۻ.~\0 nu;Һ=M`ELgZ6Բing=?6l=/ܮmo~w{Pq<1Jbwt<.Ѣ#p\zv՜twlʯwKm6o_Ju-;T}u4\δ>27wqJ]] M{Q;h6 kx;\ ,:}Rv%΍+y> YSvX1oDU1`RUkJ/6IKQڔ}>#&?36Ŏ_`ޠ7@C^R-3ke04ev.z9koDB&`3=dن2JYSLNx2#"9k"yVgOZ3"XFONύq~=YrmމJmrް Æf/@1p^ynFQ![dRY\9LYE1f-IzFg圓F$k$Lb^D" Τs~2 =c5qv{zX/B G+_s&w_izs8v4iÇ~o.8,Lޯ]M%4b*\Ŏ]إMÎdDh}bJWOr7aQy9]"ۣkKWvn\B<ӥ ')gzRzFgݸ^ =KZl*pDit٧@ c =^I.\\6agexhˉ".Jp~Rb͔DBpj@Y8cb ջ6Scp2?LJ2)J2O6݃ xYsChDǭ8@V& x+T6TTepFx7SBdNKQIEk1ݸ={ے=B x)W$[Fz ӒLn f#])]}O|p0xvXR)%ё2(Gi˱IbP(chiL\%,z)U1 e COhY8{2+okGz4Os9uh$ ʲ#wrؤ/i(]h.tB'k,*CbN cvs(Hj@tf(Hd>qk}!%ۿ/aqT:KBP jا.tk9h>ѕm5F.197>)Nv˷OqbT}A$TCUP yfCm5nIշ[ybT+U#zd{>:y"Q*<~tFPgh45e@nClj|Bה<*ɻrZ 7  -`g#"L%ɢsFdk13͊}FV7 >"Xa.gxm`[bokYIO+Sϳtou_n; %4[E JUEq)iHZ`l)l}iD&rjs?O_˥@lirr`3adU\N%>8sYRWpc !ĤS:G1Ĝ币,uL,Xp m4C pg\AA쀻P;\^M\|hxfQC9~=_ͼʞhx?hY9O Y5l,>0<1 ^kt#RYCཊ)Č4S{Cw[xgd_fic.7qsoYA+A5 ǜ([Iۛ%6Wͤ&pAvgT3~Td9(s;9zq<X|6- l$ @!Ad4:%9ΆH9i t% >8/N{.J;~oGSvS~]^ΝAoǗ r cb!d1>1(o1yxOSws oHZG#9C稝JɤaE4! *̭bM,j&@Z@ڀe{ Ѹ1!pTOZlf mfx"mHF[! 
e=K `?E[YJrf|+v~Hv$L쨛"dYU(d54kr7"c(LVd~MIO;g8=Yz{`J5 i^ {C[чoa\O♡sʳ-Zֹ^\u̷F~4_\Z62Czlm[Ε}8;sѹW*ܚ&f1lw= `UzݜQwk 5敜*[[z<" Si6m_ Ce]*{rLuG*j,+09]`l&XE5YrzW7T)idXm:iޡUeڭVAw0u/7] AQp2KD)ey̵ƼQ;C I)HFd xH~ǣD< zٿa 7ՍFT/fyp2P⎟w%'Z1H(J.`[@M{^ΰM܌xZ1L.W 46V]<w/h3m8Nmt^,!kPHҊyB'OkN2xH:Nl^0ӽ3n{J{Bxnr6L \G wT1 I6J{ hdD kL10r#_k^pWl]Zz1J|mKz]îaa(j\ NkgGKZ{|~tfG?(i4"%;ݥ'{RˆX|{ykZ=Òrw(@Yw˶P'43HVsUFf8AJ蟨]C]+Պ=Q፭`i*s)Q #`PoEȏMSܢ 9ݟ"}|v{Eaׅ7.Q=oe`y!VM/cLi8Z*@m@#T(m1P4lioQk]Ixhu1@ }O4[햽L6m׊#/?MA3`:Tt@MCxK-曕 { jR*vD;d0Q%BpGBÏI3&kHFgN2V[vԘ`3hS Eg<2:pBFr)+xjYm MT|yfj4˯ןǜLrEd]VdمlM(ݿ3%f>j{.~uDD?} .N3ԋPGV (`afU@/*B -'uiFbϝ za2OjC52|*UA{,g+#nḉ]] =4[IX}ymn#G,SFaDq"Y (ɧȄ5-o!v4)Iu9|05u‚wϹI)S>jKS$raw/׭BFt I/(Zqk=aE, t!*W9v=˂Y-ׯrD;rjnIF+$& ^qY.)eb"\RHw:=4* `pF,|\Tϴ0 :idPy5R8$P\f B=Jc8JnVd⹉uמ%2kH"I.,KJy(gboD~, =-Ѵ !yN qNl0@d /TfB9R}pRJN|'v i\oT/^]NϛGf {ozm]풣Qm23ѹ# iW$#]FJred9FB0V;5zM q◺ޠ~]O~|7WxǧӣyROS(:@ |x #5|h*Ǜ ͍`hڜukڜqoW W0q~R ԛ5K˯Dr\Bj֡2#F q.;O MT KD.bB )re釕eO)Jw{AY <*&*U5#dhȡ oOMfo(}mÌ.zvDu"i ϖ~ѩ(rInM%q90s!qbg?Ni ',XN=xg#S7&#Ыa,f 9͋yZdX3qRa8.oiwu{‹uRnE"M/s|{;!J?9?>_ Z+?= ZB; yb%JiX#ˑk^~]^t`M\ >uAoWqB|w6Mn[#|zr0s4^m\tbSl)RA"B%[R)m 6@*V) |^y'|rj%ҖjB8Ͼ肵!3EX@9HJ|&Nڵ>Ar9@=yesX4a)չ*pJs*opm[msy.Nd+H/ݕp">̈́)|%*WBa+j!1ʜ-iy+MT; 3Q8)M}q)ſnOQn.QaÁ[3HRx{%SUB) &*rE$`E8llIkƙ""rh’`B` 1hC59A2Άt{ q5Lߑc9M n% Ip2'!"PO}@FYk'6I ހO?3%@W.Ҥ1h/iK)pK,hiFQQy jVz#C\z(w=@ Te?V C%5Tvb J%o"W|{Szo~|?{ƒO c>!@޲)X,j)IR 俧ŋ8ƀlkfxǪ?E;t_*a"vvs5Zt̏Sy|`Ydۈ]nѲ[JVǂoJ`QrCBU>R1Z+ӴFJkQ;()Px`ĨDA:"E`GYc.qf=@U uU7T!ww׶EʍEYeub^| "`rϡπSθ?mz_Go?=lIݦbݜnv>r>|3ɖ^ ɸWb~Rw'^ ,&ۈ}zK;\]}K?}Z>{xkq-ٮϗP%ri ̱O Gb<6(RoXsn'Q!/&F$;Iv^[AIrBk2,R9y %dt:(alZYZo)y|Ak|K:PKmQq) Z*A˒m"]OL)&82"7nڔA/ p3;[^E34 M(#(6fѴs͢LKSZ75[73mnG~9*ng'8 ! 3^gdX+Y̶ġNMl  <GTla k<|NT xF{)qd[#c3sv#c; yơXH,<*>X͹VӋ7śl_hr;ݼqvm.J#bB0+SB]Ɛu%H"FY Ѷm"S!PŞOJV6a@%[X rELhub+sv#vӚMV1``O%t69eJ2NצR )YgARN4@(23C Ζ&e2+#cC*Dd oݐ9p.'tzrƾ b38]cDt"㉜fV;+R IrRDvp~.5дt$X`mdbNT8Rd##+iV{+!1"63g7"^f~ԁqq\\g3/9QE. 
xg], vl#cB*Ǩ#$FTycb_waxxk}o*q[ܐxGPⱲ~{q>5ㆴc='!6&ͫܜ8q'{|hѲ`(xw'.nn./$f.Px7 >vɟ?y_U瀧w7p_m1)rђBʾ0v)R9<(:KJDA6r&M'X=~ K9oȃ h,}i֤= t.BSh+[qr-g 9eNũ0J%&+@rbM .iHrs:~N%s7dAœHh^}(O ?nx[Ԥ|w3vw^唥@ >fIs}N Iޕ&=*M"s:3,vѡIS5D^ PgHX,ȃDQ1h[/)ڪ`x$٘()g"2 J>'%73gѷYq=]kۣWJ^om$˳}M%VoW\RȠuw$!D8O")yU[Iq]cm0!j$ИR"iY:8@บ;FW+;1+Xkg%պEE3H $(k;P7Pu*!zcko_ێw]IV24Lh:%h>z(- 2/{|j1_+Z3)/)*Ss ^p"G!;K=sX2xB"'\ˢCKFP)B,d m ^ܔ TjTݼ˼֨4NCM.n/↏C-)ϓK?\\<49 g>2܎7dVoJn]WtѴq5xn탥_ہiiiDǫa3ŮFcl@E"LD/bbu#;h]m^D6]Yڐ2fC:FD)e()1jahY0Ѷ]tyPz:aS/iw3=o>}yvɓfDi'Z s 8PSQ+z5Mj.siq]-DCw'+Zǒzǹ5LbGkB]f8Hk%EZHizz݁EP%{LP"dX"A;L.BXkb@kJs2Zj&3b%_!I{eiB&6d@IfV+@bA8FA"5#H[K53gw߱eW k ǍqvЎt ߻UYBڞP̧-Tiڬi_a RXb3@' |W~aZE k/8W[pFf1FUъ dQ Ҟ%;VVj9º LubsGq ԣG"~+k9uD6Jƕ@\ythq8#C?Br f|H&G.Z C e0 h G%G)R^`EڍWXxڤcYh&* fz1&Nl[ E gVSw|?%y௯ifO|ź"(K:ci{!.kC 9:-3Mp)FMwވY^\4l^?58X͉ x̥3˛KC#\ѬlB +IX=s,}R:Ⱦ+4/hmt콽*xNK&KL# 1E& `bTx]LL1>Vb YYʚJ}]*lgEy^|x?q4{=̔ ,-*%@-Bk`Q$,(P}q`xʼn4iLZ dIc5Na/*$0bJE:ק"&ic< q5>3frw&>;Ym_pnHHbAJ-#խ8WKҎ)眒F#2EWl*f63\z6Bπ[ xB>n(@*R2&O:I4^_.>Zl}G P.EZ+gۺW$C.f{Ab?m]dI+i}A$˶$s,'J49$7Ա=H_ꂉn;(|L^y3BT9C RhCbJmC)@.ڎ|#l|]V~E-,#%lRPyV$FM$gIN츻ux=ǫ_nVuhQYgLo ٨EOzfD-zn^gB}"HR76s/OF ݈T9-ӣ0G4llmÍ>#"QG4ߛ%?uq]ﲀN:-ëjIU saɪ,p4.jd3IOnnk,^ y@VU ' ) 5ѿťC5 u:-y5I4v?;`h|0_Ӓ]#;YaGlmWޟW:ܚO-c w6`]͛&-j~"ݺ ))+;[*zyWnv+:&F-Hv17tsm25JF aEeW[V,2a8W: u2£Q\-KI?X_&esBWuqNO)㮅aTJ*U)iS9G!)X{$*=sV~yza0wZ h:N?;6騀Mj"r=SЊ1Z1IjH HgvX,:* X_M/\Ϋۏ?%X3iv1`gd 05.Ft]~w-F~;̦ǿߕ75f04K Ϭ%i;x5/~P]jq)sA hUߖ8u͡<BEV,*;>!u'7.yͮ!yOSO9'OUv|bHU?Wv\Am{O܎xZ1m+:p&{a !;zȗ|.{CP/2&YH&@X?Ü>`m$'F%N* 6pL>J'UviKQC3pɦQ]z[Zkͷ8o2'}) [BzH70z.Mϴ Mh \}E/AjCd@-pe7 ,> TZ_/f1w1, 8MN14IÒu%ҩtBq=L#n-%[U'>6IOr$c8eh<28S8C7s?<Έ ڶߨYr0+:VK%fW&0i5֪݌!}_HY0)\o~=TGeojFwO `j(E)pES_4`3X$d4(CHsһw^kvSܴP7_^VVa̹?̃Y`vAB6bVb>s;ؚd4r&VtnvibLQͣF6rӫ9نZ;`g/rݬJ)fO~HXu4 e)+#~{AlTN 6ͽ:}xޔ?߽՛wû^,k Ϗ x?m?5W]MMͧ[L-Dn7|y%6}A ZI溜_ i(//[rr%5TA0GzM&?vބ QX$a@gV@Lv;Iivdx缗RzĢgjT^sc%jgZy ƤfŢkŎttLl+Hҧʆ`t!Hf tV§v.P5߇ E.XJ|@4N 2K3S!9sḨf[ux4#%Oԗ%!{ Y[ƬF$i6(Xuu\᡻dN,{/ܺ`W_Y0_LOעrM"#As"i2dcSPb ,(sA_)o(w ͆uLg*z{|V$%re8 "XΥvQ@煍ҙcف e90a"Ka&c,fL` }c: -lvq Zʴ툟yA%MG)B jDR)"sx*fP\Qߓ@$kQo+5ʲęב.CxZ%KUT0tme(!H+s0"x0QI%`\ NDyr!s{ ح˲p+,[,kۈIMƒr>< S̈́:styȌr'Q7G֝G`+a%q=tW3y(A7EyXֹ PUt"R Y4kZ2 ; =ytG e PGgPS"XnD]ˢ$WR+%d6y` 䉙wߓu *T!VZ_YÒA}(davZ .R,eVi,dT4h/,XbVSGw1fMcvI)Jx$dRĄD̤҅J6uJ #`sdA{1b.`J cZ1@:aI(2O*@VG.RX'L@$LS$58ihvY0#zD1emFo.+Ѱ"L"Ӝe\2!ȎL]d3g]!Ȯ]}%ol1L9 ē$❎i^nN|U5eY*T^iO;ө$*ǫed`* %|›r$_GɵrOvP%-Kd"&fF#D) dŜ"J˸V@Vu(Yr-})^m I~Qe=6oCRl ־+0m[߫zϤ),\;w(j*->t>V٪a'wۨwRP=,++y:3;^rN\CN#RDey6:΢+^EȚKKEFvdBZ}h2e#AXBg.Nt`\fc^gl錟xdy#+>OZ n/^lEݶsڡk㝇XYӧy_ ΔĻӴ`O9D#jW1{Q6%A>xogL7AN)8XGNsm.t v 0'=rr`]wR x2KEۋoܣ|yƦ v޲|&Hm@~l*(]Yo#G+D?jـc,0f/1yvM2I^![<$ER]Ѱɨ/bV{'T@.h/cmx?uG1&.60֓O >_~y*<)IFC͒_yAh]HvYznᲽnWm?_2bȾF.6]DM-| O۰2/-6, ~^|~6l;XLny6ՋZ],b}t<[;?4X@٠0 x ,q-+>q ?֛Iяklۥ-+b}Nqxj! 
d6:ױ<DhӫQys~ucN!KKZHöG栞0c>-sW6ZZ+O?~%BChʪ-R*),F*y m$s4 a{Bcrkz[^pEl@Fk4Χ(/)*z98,ԲM^籾ImdNumK41Ҧph-W4bw&jW@ ) Ui9uJipٷt a 9i^l JI"ʤ&,}TN{ETQZԶ.H6[ @OTH)F˪WMť0 ҤF'p2&)JJqaJdgg8\MFx7^JDM:,GRؠ^2h_A}~MAzK9KI%9nla$Sj#X7, eGKa?~GCfJc- 2DiRR*XR?!'a%^S9IwiId:za2o^E3z6%~^J"" @9'@TxU\}'?*۪'?N~G|D8`#`zv$pЫ*y_:92x|H st5axd7#7+ mh4=JwڻXl{u$eSx0.8W 5)vuka4PD!2Ry"4Ո =^p=z¿?o_)?gYe eeAd-K)e(mpc&I]M(v>'Qr4DA:km,* ]TǛ.;p% R='vxK[of^ͷGjڎ-3~\Ϻ{4Qɀ{xϑYTNEBjwtS';:6z "&(7)Y>NѤC6'^O.sΜP9״ұ`2uSJ %m{j")B)Q倘Z+ӳNX #ts_Iyf ۬i/ͷʜpW-nd^پ&=<D!ASQG|b?qOK!ᰁc@F{c}vڄEtO/'}}"Lv5S[i6;]E$ ER#[uꢽQAa)}˴6R: *"%GE꒤/;CAu݈3q̎[`k d ño>(Tu2[g} 'oy6ϗMvg2^;g%evUx'Wvp●tymo!6ďMl|%Bۧ~,rQ`f1z8dzlVFðiȆG6Kmmo|y{ܮ}VWg>:?[o/ce~Gny箷$ܮUsu&><-iyIS_f3w<7?o.> U`s>Y\xϪRtRXJ}oB jf^(ѧrQgg֔QP]D}QY#m@i8 `EqB^o/ˆ"O'$Y0uB"ls]@NsJJ^K>`ra:¸O(O,#ѽ{Y>ܜ;?<58h#ۥ4H`}#MZ1)XS FmP3^3Abb5WQ F ejg6~朲_ɟa/=tE15!䫓VWI N|(K*!ST]pDaV:#SdQR-sNmLB'1I%Q+Skڤb sbǠɖr2h/%;kdL=JuU:4Xh;BcA2㋇EZ\sly懿hi4_9b;61h! FcJKNQ|%8@o4u5T1Tg'%tJ 0` r vLslY]dڝiG== <Ԃ59hb)8mlR )#/mBr 1X23C EG&e~eV6&Q`}Lw[Sx*ؙ~<]LjzDqkIk4;l@! TN n&hyOVcqMNg%3LPHDJřBM6t1'j ULjؙ8Ӈ̯:0.VLK(E=.M x d mKv`( ˊ-w[q!k YfFFTCagq*x(O6ޖ'k)Y,*qn.|:'˧Gp*DMTHV4!iCYJݧ=v8T x=Z}Sjmχ}W\WM[ :FPX -mH&N2E*g|Tgi0QXp)iQ?8 ҕe.uiIʥ?x.2S~QB!h0^R> )ct*NQ*} ڗOѤ2iz0:>w'^dޅ9 c4}J_~Р ɫ .ڐ Pm6![X̤eYEK]W/]t]Xu2B `$pE1P<*_~Jv0IvÙgbq݆POX֥O(%j!/L[TNye,M_{tYT}س##Ǝ?mn: G`-+ uB(@U K7-qmb_jrEg=<3$/-9Nɑ$(ĔrI r%"XXe+_ rZǮkw&nrĘ8X 򧒢͆gf('ۘ܌[ڰ/JOjKs'`b)R˴Xb%O!`ٽ 9R=BN L\o'ƻdoġYM4l8vwȎ‡=sr෽sչ~vRw4S]td ۶S՟ل/W 7Nԣ4VSN'ѡ@ݻ bVRmyh"[01D[λ6gc}|$aISv|ԗ#脛vQ:%ă+ vhF3Y;g_,=q4{`1!:j/B"IH"4Ge!<]A_& Ҥ1%9Z)l1"ze} @ژF!̘{=<[`.C%Cz)ڳ/dG| |aՓ72N'Q^#(CP8v* QC6|'oƟT[ɀp $=;>z*̋%5 ,,x7ԒVڲW񭳊j>\F9'm*擫4Ưl+?f$U3r~8p"S:xqEk6-]] 6p\&<]?K?uqQ|s9?Ʒ7whv5vW'al?6g/'7~w`͝gMY^0J"[gY{ȑ+ [&Y| 0Nv$g3s,iHbwKKvȉRSd7U[2 +#^.Y^ 6hq?AXk{0ijֲUi4gg7U@ ݋A`a(*[;U!4or棲-SS^k` A/`kz˦P_ئ}ѱM1 y?TZgߒ:51Êo06,erpnt@;\UEA~]*nR"\Aj2;t\N{NSSj?lj|J)UK.+M<!(I!!iֈ ": r엏3qŻWv;=..!x};~ & (ju,PA;ꬎ^ \zQ0ਆH$ag5­6̠:yI}}v/P*s'gVmFaZS;޶;?+~mi0*d0;5_צ`2^~~?ET/B[ow5?(;xdl#I/__3?ѨN:0cl0v4"I3ΒUwr06lW->LG>l`_WG6spgmlOnmjwaSb~M-''{j uIkC9~bykf3x 55^r9)ޱY|Omo3EnWiAjRϲ#X}O:yg&B V'>{eٟv⳹&PxCn)SGQԝwfU9=-jY0n:kS1g ^moeӽ`:?+r<ˣQkͷr4ggTUL?M yz\UFtɓe`B0KLyz<=Bo[VlKykK-[Md 1E|DYbl9Tlln>.IJ]g( /!g:Ε %IQ3e$PPBF6,*@%4mm]:l?1@热ح&nvB6گ1Lh.,kSTͥ@+bQ-bJDj;98mm߳ d[ aYRhkf8ƒaI( xTm`ՌH "3(QǢ47ZB(KT<ʞ6FXs.j2D&B£)x:ik<аs-RE2h.Z̓+?Wķ<=rax{Eؖ2-S:.tS[DDp_[#"^U,"1/#*5VyCQ/:17vnٝ4‚gHL)se4“FъR-B 2(. c} d/b?'ۙ&ևrLS\S JH <$tZ"cf?9;wh#?V;_:;l>!Ĝ)9y2^GJ;.s2BJ>JBhTu)!KH9*"41rP8Ց{*hE.jǁKG} O?b1psXB((/ZiT~veOE/U|ALq9]@j|?OQ&N=緞u5̞T>R\) < @+*c? ]dEs#T12H ]'bϔͳo-I1+yV Fb#nV3\tys*g7)c(lя_*x] mHU•=ZN˗]A`4gP?96DGnx3Ί95yvMsmtrټqf\w׽.8H慄//m?ℬ r$.j0l0BЕ,")bAţ&i ~z1Wɚɺl՝2~$WjHfqv_#"{-7|,w՚N%/ug~%>tz7՛s_O߾B'\I(V`̓A) OvCS8lhC3欛ȸ)[y*粵" ߯uw ,:ȇ-7R*FfPG:M颂꯷@*T,D"Zj+SfYxĞ4G4;gs:s E |T7ߎ@N++mt:l>D@ SG!BJA 9l[Kb 2`|@2\gc;}m&؞r<5sZ yvQ Y -ȘɔTS1^ɔ(؅ غt{.[lc@Vƃe^@=|$.UQ0 s)/RsaDT:DvØb rn۶չ_hf(@icp^CE,82HPJZ&6Av )*vy̕BaȦ[#:A,X6jUy-?@JH䰓Y@.䅂MLNzlSB^q~.a0\uwive>]6JݓCzYE%fJE9 QI\w|XepNp*_%I^@LcR+]pIȂ"۵z@ 7jpՃ Z2z(պƸ(y4w lOlbvWV38,y,1ezQ1#|G!|cgȞs]IC*똷(Mtr,Du,1J&J iMQ9ҁRNo"eDEh KZL XNb(A\ddT7e.iSe1:ZG\<_%ۥu#%'-$T2&AV$idBEHJb6*/_>T./3 ݪ6Bm(uuc3Tָgv^ȗB o } {4 8) #x {{?ro0bEAi OHR -}$@tYE46F*5?^НK(8[Zav->'f<;?$n[;㝤E $=[zԔmQp-;xM1EU7{#X3(m9v!TMLjK\)EoMX&5t^ȹYOO/&_vx~ϽS@s 3!Kkհ(BQc%>FQc%>FO8}]v*(rFؖ!@6Tp.K?o"\ZiȘ$e !(ʆ]q5l0qzYx¥m>K?cŜߡƨ-m9-6℻A2XG]1*X&.M/j|!ům7-6{ 5o$_= )XIhN=483r^kq-nkyYw{ ir֬UѫbVOr0)QAr) *q?[r5*q#:":ZSq1&RjWc-uda!WT}Mi &mQP,RD@u %jXl9 y7LN?- }BuA$^O`er9kf 8*O}`RI2DNZ 3N?4|׿}mjX6*{y:@//7ϼE-&(# @RLy!|^N`~!AnٶMNFY5IZS婬'7{J8덠wmY$zurʹ OfSo:P&g$Unr$z*|6iFi^2juJ/PG;/w)⽈rFZ_&wi{kooۢǾˋVC|

<=Om˹_u,@ONα6(-:xhO?Nz޾ENVࢤMC2|5ijjo5501C>rw| ~>nξk3/.??8/-2Fj.JY[laE |CTK bt#Dx")v kR[1ҽZ6$8~8#COFQ ٱ &hj *N]5{E8ipz>=hY4L*@B2lklѡSm.U{AuFP{HHvkgZ5<ipםVpUw7WBWC H҅Tj5%T観>wz<観e4efbAĆAOlUTt1q҈^J BH&oR-hi4U9%WR 5©{/ǃ%^ '^ef.N`2U,jDO1T3WPkm]L\H.6EDhknez/7y(̷܁=B+f5td3S mLTgR\ m@Sc8Vƺtzv/1y(w܁ߢrkaU@ҥt.W$!at6y&g#ӿZn`_L-F.R˭Qܾr*I7Xˍ X[>棙H4?t,utMDL]M~t!{S]5t*w[a[AcQ[my eۊ@5iT(JGgA)ʐSrQ\ GcٶGm?k6Oӽ+vK.] xi2ΗpS@&2Ε2Q!DѤTj`3T H!zo%aѦ/:uTlʻhޥ\[:\t? |h*'E5A4pRE 9Uˠ b<3D\JkKreHpcrLl9 A#2Șp?E5R}S$֊Ma)T *i0)EcgLt LޒU25:V6J`ZɫJ*rZUb\'H8p5U'k;@6޸NAud B<30gI{Na鴬9ōpG6"ք6tG;NgkAvT袍ƚD_h|7˽^(-jByH .CVj+d  (C#/@7Wpz(Y&yϣ;y@m ww vl_wL ކ[qu.؎z&N0vҁ#PNDEw{Q;Eʈs%M&Zen?L0RUqX۞P9|V,4,AиMp&(rIsUa``h?W{dyG$}xv~qI׏?l'v3[;㝤E OS=LI:{b)qۈ5b\[bo GfPNA)rp%hB`8ԖR2T=0UMjȹYOO/K_OǗ{b܀)zWe0r*͇_ ӧU+@#*AKZ9›aD#r"G%*aĕ (+:@ *y6J SXEd0l/صg!Et(%GS5qAcﰬrn͚(FײBwFΎqq6886M͎[j ήM_|n^@oH x5(']#^~̦ /=C/C]cn9"d#iPz"}6j{=INzzwۇIf?gܭmy/;0C[n7<.{^n7) og%v=㭧"z5W꺫+aOwo?Y;b/NBn#6l4'X\ϻ:fξ`Ȃ%w!D/T RٿMn;Av D+ASP*zor[BNTdXC] zQgm}hȾ#$oFzX~莏ұ3}z Z>Tb@Se}\N8Tl%n&T0 xV@r(J3`E--_ڝ!j}ZWc6l Oh~SyL صEKc(VTP".fN \&_qꊎd\cT- i#DlҥdٗW&iq0r?U; = i೐ƳI;9M3PhѸz~5OKtzizj~Twu 0N7@v׸~)iMH;q[=J$R5aᰦzD%W'C:G& *b{. &RؔHx ɶcf6-3 hZFĹxLδhK;z^d>L:[+t.}KKԜi 1EMH#x餰7:7Y 9dBšٕI8FcV![n` 9dB KZJB)k2"&=K&zԞpq2\꒯5-oquY3RCr&'K<5DRl&5Z I3"d7*z\.v[ӎ! a-w6 s\G?(Qma:O dw!']/eCF#,Z`U^h!)ۤBL{3ZY"BJ傴.EVu.@cB#/Pr6Bms: u lZyKkofߣmƚfӽ gQ-5!}ώbuGsqմc3R]eP!qfyMVCssCs +a5(CVQmL'i 1nSJpDJlB,H]-!3__Xٚ\d>5STþV%=NR뙓&)fSܓbWLkTx>C4N%\YgLTqU)F%o(Gx*GxjGx2GjƈOh(2,$F!;EJp5Ρ̬֚6+6lXEȶcqym[>k&g?볞rOu~;ݹ8hG#{\z(G>_o@Vi"G˒ʙ@z=9nQdv.KVf 2-\B21Cr̘mq,u0ey,A%ۈ(BVhˌH R Rd܅Os79"Lw))}^ѷ}+'մu_.|Oi&5Gyec9ֆh{6H*ޫޑySқduLކ=hXMl;Xz0x*g.Y9޾Yg߈7fv^Fa츌Ube&V)#0)Qux@aїKkUP8W 3_%e"Nm J֑Q"|fP!W-EFa#ˑQNǛϟ=gcо'Xl$=2dS38ld'r72u4[+@cq\^do؝w>?g~<>#2Ўk'ly}IA 'O#ʥGČ:oώ*އG<1#VR=0~uDə/xک,8=# +"*36HF{o?"GbBGyBL>=UrۯVc2 Ӫ_F"\`JE֖HKRQ*K+2M8Y)x4гzzynU6+@9%%£hXI"P^Ҡ+BmВ)O;S â'=t8v{?sKIӊqHǵ&0F&e2yLެdf#gbŜ/h82tP"Em[vgn2% ܜtA5iaș4 4:" R("3;䌇$aJVcH 㫊'٨*Hz9C 4V̒fDt@(xE)\¹;+g} ^lN[Xxyէdt}$c$itSJi:E] + L~%$0lpnC}uq {8Kv`xV̂fOuL}N薚/HSwuWJ0H U]*Ȉΰb X3FrԱxٿ߽K$l8Vt}_0;\Ry =hJ2 ΋ȅs ٽޘhQ 1!!Ҍ?Wnx~BRh5[RX51 =(9z&w//Xs]ȵ~lFoc,cS÷8޼=,wuW6tha ÿ_~TZ)qц M__ i }ͬwo9{%8WBw_g6ޖ[Lh5~9x77t ]nuwsT_`~z.߽p%mӪPo R| TVJYSYlsb;~ ^IJ~v6[0C}H9'Rm@Ƽ[ ]#QcU_NM9ۻӹ_}n6msEgyԜuz_cq1:'o6W,|h2S3/Qסr%;2"Dރ;wd1nF/,ny\:E+y @& LFՒKT:`tFˌь1cϽVuͶ=NEvKov[+ex_W¾+v_R ,'$({W$0j7pUŽSZ5u*RZ +)QGpUVSVUְU-%rW `.fJ2*3]o!F,cU WLtvj$k?5QOpXWsUIlX(}0qǿ]:<F)Pуu0۹? I/{}dtI;@yM?sfr8J2|+t`5sjxdTy}PehX#J֞xkl|s `"3kվ:o(R.=ox9AXpW$joઈhWEJ;_"\iP`vH`2PͮHJ{z9pOcuHym28wa-A>}}FvhU7s ~O/+G,vtfco]I"? 1:Oo^tӌjDG [z3Rܰmg_Frfn5.=E~vxnߜ|㦎?z$x-$P.?zq9]J}M[V1og~`od%qdz'$bIh kF,LJUQ*^_U02*Űf0 ~zh)3㖬˛G~17@F]>;GlzN[ϥd'">xO5O Nq]9R!͐= \!Dж#&&lV%Fb90 +]L;zCNP!yXRP9jJ!h5IHgVFMY'\J;FR22C*ЊyKqEc^j*"m(HP4A 2@oDTQQͣ-i&t%</ #bFĻ] Ue|ŴD\,.zf .$3\PM6hEbP s(e2.DBbEQ9p+xXL;4<<܍*}Ap} kb٢gcwa{a//]&ba\2CħqMbp9uP<:}Ŀ > i> khYlHuxcU1]jIqZ[YCkP]$L˔!YLzʈ[@]|*qXizI^:w\{ВY?X2mo¼ dqQՙÖrSך4s(sQVBZW9BC*(Mssb\&%ڡLJ>vc3XR9ke(hfAP!IS,I{lT\2/ߓdxfuA98ms19#6'#dH}ŪsDrs4t29㊝\&;a>4mLV{[kGP$ [a69xr&B[1p@xӀl?Q4LjScqȢX/gsULZ&)XJ{Α))>{jk vFmR* xbA VIdxZAhC3Vk$Q>'<:ð˜Z0q~Dc<dځPjyɀ5w>4A{Hi8KB$* QLk@8R[|?:4f߮2o{}kc[Ի_h(欸;p)]oCxCv߃e%<6spVUh|k] >7up}eɋ[)}R5,G qc(D@0HCԂ'EL V!2?JA>wq"?4 5C&y8,\2pF.}3lFA㩿}nǞj]FY5[}[[ 1n-~9L녘31:.+;b4(V ĊÑbH¿A+lObɿ~ݺ)֯~!pi @bN& 'ϙԀ{NP5Z(.p))\:ϭ$)!'IPr!=#UJ(UÎ(ԥ#hiީW5szı{{c{_ə=A1t&{BGPHqa!;nq\:;#H}#0Uߠrڠ(&+CssCsƒa5$˘j⸅)!Y)C2szX"BW[-D5_s:׼t<7J -GZ%6kkK'ۗ䣧/D1%ㆼc‚WsV\[E~VV1Z1Zy_ x|lWR^-R d"US AdѸ*FFiW Z`Ͻ.m]@+BB!2 +"Ak'9& 5i5XlDɐI9~[!Z_̞ږiOФEp1"ɱ7^ȘӔ!NZ~ ! 
Qt7S4~VeQ!gr.qdNO<!CrĘ%GTĕAH!p ,AkΤ2%s *E uXu#٤c^SIv7긂#v\ӆ58/_ܜf>.12iJ{P mOFj < 7gh j[d61h6t׃s8[,vo{KjÙRzQ"1K^GFe{eMBLg;xa ~emP޴ħh6 BF  6 9 a `3hJlzc{Fk)RVT R/Ͳ˼PC4] 0c2k[Z\|hTd_Om~v6YLdtǷ]3(4"o/QE.~\fm__9+7~\KJ0߈7wxwߗJ)Oq F.{~>n|k+yu#{7s9Lˋm6?en{u5Dd|fg|~Yߜ4of0mѠ~u'(FNK _"/&i_׷tK4I ~X,kedKY\7~q(jͭ GZɩ('N?\ 7c2>|WzU_?p2˼`ۭQdCs2 XL-"Kz=)9'qͧg7ΪO#[ll""uE84܁ZøfEKDi4q999Sɶxvx{x}xˎT>z9ĝ9%Ն>Q6A$H΂T$hmy%PB[_?ljܧfũBBs$Vv}&kўK''{54RSu@Ɠ9QYE엎2˜MW 06n)x*QX!Y@Mt@ FbRM̼=#wA+lZēu|o+QQ;Ouo\'}4^(b $I欷\shT&I{‰ h Ah,ޡ^d]ٻ6r$W2;mdh 8d=`/sI].ૣ,y$9oݒ,ے-(@lXjVŇ~*vTA%w`xKdtv 8dDdK:ۘ,z(mJ~2L R{׉+30zfg~e`;2l׶6'%kaKǸfn+RaN'x |5K_ӡHZK2 9kd2[U#@`>(%07¾acDJ,ݓCəF@$F"[kI)O,!|r$[b+xb;`n޶rY R s'PQ:`20pKm,CԌS0c -j9;Jwb wSo[#ŮBRyyǃlrޞg\=~BT4IPgHo|:Zhy9c޽Q!]*{~B:|dL,P13H!kh'т'6擡ZmC\Bј,/@Eg}N)N:m`)jن}ڤ#.\^_|zt,-/ T>8vr `.]=^}$*9^](ZQJR!(FJ 5`W( Z 7It4Y_ ݎl/3ۜ2(`.6sLS%ȎsҢ"w"2g`ݡL}B)~BuC' ;$ZC~GcyE1L-BִG`snWy6" Rͦ 7M4tMϷ>zQaX.uFNgȭTr^eT [¥D I,$ #-(.i*2睳N2FwΑ+W:}L_qۋi0FAA٘HȌ$shBSC)BҠo_pwvqtJ #R2(z^ ˸mK/{2ۀ3%^UEJ 琴)0x9p>WniBkoa \iAH pIxٌ> !2+Bk|30Uc?3 $Ҫjʟ qۈ٨J&8ͪ'B&iTH<]Pn-~<,zaqɹdҿxw9['0(*|(d49%D%8R0P2< 88eFpn:M'~x~0<9HC[ϯ-~fIY9аX.12LŵV]f<ws-*f-~B7bQd~Hթk ɼg-Z9< /`QRt\<ݫMg+ޓ I+4XBեw]٤~ml.',r D"zH{p?z}\u4hPnѴ]÷i| ^W ha[x[:|LCH;_jV=b˅lRYqXW٨ L0D#_JvEIlL tƟj#Ua.;Ia8 )A{sK}9HŢgh} KHi˥8m]kn"'Nז"؆xjN|Y㺫 `h`1PisMFR/?Wv d(̮QKRçꜢAɣ=C+ ubyQUNIm͕5&!:sYµm=(U*2@D )'9|,5BC=$tTμimi P&{ Y[ƬF$i97J:mK\gW}ڧ܉$Ie+F\$jq6xD D*{͌MAY*ڂt8ejt4s4w' m5pzZwIz''pNIWVz2#$_Q@q K C)+U#O.dqo8:1ZjN|#k!*MA1K`,zie!3keYDoK:쵭V S" /Uc[lB=G\I(YC*:}V),5E- P҄JΝt^=S%/ U;d!$ٴ8O+^R7,^ Q ڸG+bŠā;Qtyzzc|o)elr:hY$QK`F: ҩ=Gқ*4 l^:ii0{_;_vKZm Ėjv&q-#6tT%Ƹ0Lӹ?{GP9)fDς^;Me#Qd (2YX%cn%oUS^@yY[-7q34Oސ')1'8c:IyaZs6GuK?PR }AN$Q9^'.s-WY($/v~'׻+} .iY$4B452!J,Om̙+!kqzЍ3X -nx}(']ν;o FA f^_ޗ%y.nP&Ρމ IS1jIѥV:YFmUIBM񰬭d[cc:tq;.)#.)ӋC)>39h#2JVAXgѕ?D(KKEFvBZR|h2e#IXBg.Nt`\f^glxy2 0也NB\ekmkw6{3y_@aǝۗӴ`M=rBGԮL%sV|,!ogB7AN58XGNsm.t ͅ&c# )UXsY߀OY޴Hïx`9'6b&м!GY*kVLNc{#M\%>ݤԼ_dp2qؓ:4r\})@yBΤ9LY!_|&NQ51 a"O>Z+ì֐y]j1j넬S 0!?6~~IvX鞏Iq@68(n؏s/YYk.z9ZI#AzvAx@cVicԸ=cqqUAVe!xVY<^ԊW,oogDy 5%m8wpN&Ջ^o44Z+|kre?bowauMeoɲVbrkX8{@]gxRyXBa-a}8lo ߭@c㬦AZcRzC-! IRVMC;e[vQ3:HsT9F?̚IׂWY6o<kmH_0] #uڀ:K iH(qQ)QdJc%pzؗW7jYs'vDXN>Ƕ}lCj؉Z:S^]%@̜랰=ޞ8i(+& ]aeWO|?tKn@ysS_+xү(U_;-a,[[*--o `+TwvϨ)B_eTݽqUE@F*c?oJnWϒW\o?/şgB;NJf&sURc܅lJ""QP ${,i'lJWȵ~. 
rQo7c!!`#poO\|*.9E>* ȧ|Z3H]G]rqoQAv]]*z9Jhk#u% cZ썺*uUP\u]]*U+ʀ+rx+OoG~3N.LV4UbzkyB3jfǑ(PJ#bÓvečYl 7*mZ1׾<R'eb[M:mdO:bYK0q<*͔.4)d.6n08<;fH avnY?,s*~kf|aWQVv-aA6>pmErDlo"BdjUJmHFr(MvUW`{ 줮Z uuUWW/P]i+"ذRȅRPmPvi#d=[IaK>~3-FZˇƟF\#!(rF p7>zREf@e<!?s p p5 DsSZTW}5X*UfZ9GfBj!2AW<խR73iIkE|(T rx)nt>Wmns)Dd<+_;" 3?^d׫۩-y?""O0~0EȤQ<IirN@R0P೥M.dN׃)ӃtbٵYr8+ZVK%fW&0i5֪ :pQM~x;]Lq,:c:8s"%zo?͞7pFa(mJ?wѐ篚`L.vl1ܕj9Gks=^Џm镟 _^>x윭 91pOVثw !Vzyv|{F{FodZ5V<[9TG:]7 _;BV,Hc[5*|<::ѓ1Ë;&Vݣ'nus%WNJߕ42"M_NRZt ,Xy(;'>l8^1яo?}#xo=Y[[G;.;pk~C547Z`hkz7kNy͸gV haa[[/ߏI_ߎOJMse5J'QWl 0{DKտnR%PiAB$b;G`]vjMΑP1Ɲ1uoAq8I8 )A{sKF=(v1SCYB%ߜ~{I:0WyM+=Ť7Fm7(-,Q88RK͜%L\yuLَ‚VCs xZmi湃bg{aew=gk{,ޘti­;*>IGE KArDo֊#zJB/ыV]iHӉeNP̤ζlCIjlH FdֈU>-TݪEnIˋ@,DD  Bd>f ErL琘OE뽀,Xx4#%+טH'1˹IZi#@ҟjGȹ?,:KU%Ieg;F+"$ǽA)}k*W?Ňa#Q 1YQ[@Q:F'fNqV.I~hr˚&,A:Lfa,IgM <90\pm#'1g-deB3iS2XF,gPκɠwKlt+r 4-%Y P#,B__{-= (%&^j3E~be ב.B֌!KFsGpBXcd6)U '}I7v:Rie/LTlE9#a yN3S"SD\p/W_'z`L'MƗb2!dN~Xc.;/2#ċC/m2"D[Aѧz" ,|Ch 0)JPGUEASԒ,JPY 3{' g 㳯]jt!_brR kp'~8Z ioif<\Hc̒Y6١M ?SZc:?\Ty0td,ǝ VCo* 4Z@'p2bw9j^s|&i9DlsL6)&R)5\9Aasw-,<VQ3"2hɵ׺paV= 8-5˓*tRԟhpALKt0\6VwoZuK?]?GGϣ5>V}]yY; /ehM-]߹U^r4k?9@0$|H5tr3o|2דI)i GlPKntָI+h5mdJ{\*.b]V5Ӄ_۵ | ^KIg0bzߠ*Z(Z8y{MXۀe,m%nİ \12/hC NU s|Y$UՔ͎CU0Y3 Q: w*R"WNV#L/wh{ƟvՉBZύͺw'8|,vdl"M7:|~upof%:>Ne̓:?\wiUSt7ְ$R\h۟Fۏv=i`1%] 2XfBvAEÌ*[F*oQ m׸N,FqV >b0V򘐤Cb8fIl* #B9LR1H1]cZ^I1bxI}F]Y@PnwSX,bN":qde<R22ǽ br&@|stҰ^XCX(mN@h:w)̈vLYћl "gQdXX%cPYu>kC!xS83Z 53Vw3L]>1D9E{4l d|渼 fZ.AKSl!~*TN&̍ 6^yd{f~༵0SKZcӉ@!"J,$m JfQZZ Ҍȱ[qWPhMk F4֗>2@gm.]'r )z.6 V7>.GMjTe/փڪ{'y ò1  Jh:%ԝA M'4Q.ktsh.I],*4nnSP(R ŇF,S>e"+$I VU#gdw9;%H r?͈=v ݶY}wyq>3>EeKjJj=g<Eŏ_u FhĹSJ% ZP^<:%ftg)HRRBd<7[t4Off1G+ VqO mGxKݿP_鐘S$C6P{ L/1"3]VQUyJ)^W] r=&&yK H"%aZIRWΠ2(bXXd7^o/v }xmۋ Ayn/3 InҴ5qX ̬Rꆌr .q*aU>ug, v Xv}0`|a 7۞ii i@i=OMG0 + ֖ p7yѰL{M/ D^/zUGVߵ GY<'@rA TiR{ƍVh `^HR ѥȲRd Y X6dὡaa2ʗ.~p{`VC8fBy%"/{ =#d'J=jDi+Z}}ϙDq8Wi(ǿr,icRn]c <5O4XssCs#CVQkR4)N֗O%R:ŨH^RVa B& ۤ)nާ[yjԠЪ6qZ?WWX%=<T~X×zzOX^ lFiP5dh<0HK(;J`Z(Lلu`A åUn1d"UITYXf%Z.} lìlM2Gw(*iWgG96b9ͪƮBޕ nwm〉&Y{w)RqMŲoia:,$ft»w=K^>(ڤ` 6]^?'~B.o)0nlu3o{zɖNlVw~ӜuH-jٴں{=?7yCK-7CIŻPǼΏy7@Gtfxs䴏Kp/zԦYmsJ1~WOAe)K[o673ydŹ ba5LvWNH$z`6 k!ݲ4?Mg+5]Rd7(Hr0!Eg cTd&0gNr\^dmYK~|xWK-&h.VfEzIheIAY{4Z-4Kn}sM+ cH*[I+^~ɉCOHګH(lP}̉hi&jO8Yx.u鬳Ez\qqCVԐ) c&(1K̈́C-XbFF%RŮa5 Vѽ"ߕ&'LޏVH~\͗4߅'8Tp{dirѲGЍ *uX~bXW/YW%:8RzQiX:z:Rd-,ե5I]lο5kqD%& Zƌ `(Da ])tKu$puw%bl" s.%oE\R⚤E^H З~7%aǩ'Ci B  R7\q* SO5JF>MIũS']'Kn4cdehF8`tڹ,Y.!|ȴsc !ĤS:G 1ksXqY#Y+c4F eHX.3"!+H1Hpj"&vrD8u,h&Y6FmxVhԜf">W'Cd>skmgkHe *wdL b3(iC@[e4hfFÚ&[`p*gŮX9޾dY"7KCVl&vZ&ahPrVv3,=Tdl{{0].;Sb`p'(wgPߴrB$(K|\C$ڜVa:hyYgRy!cp73yE9?]^v2\|{饸#Ơ3s7dE-F0Pzn7`DJEĘpD(NxƵFrf v*,:I߳1E4! *QEdV mHe#}z}2bv3ͰZl޳^R ^.#Qri5\`K!֖ۨJ$`J)(ҎL P 6eO#{CPaVrJh$£Ʋ663`H2 JJ.i&0hIH rn^3#ůz50%c>km`1Lc?eݧު>XҢ7YźJ-֬]G P6"E-;]EA݅ėjxYm7*ƉI@s! PHdEMxHLڄdra|?$\yZIwL?qxR\ [q=!b@m"> C\V)qʃT>S>"9LGQ魫}?L.?", 7l۫<}~K)6:ӶǛ'} & J׻a::wo"|qܬ*hPqz,NI_8܌|(x=p.5d᭝I$ܻFV*Ҝ?f$ٲ z mL[7GFb,*{I2܅omv fPegi>7~O^^=ڽ٥{;u?W?W_AK3^҇ǙX$ZW:RmGo~mٿ_wntIvrw}ֹ\2wOIbxJ7ۈ7$yx>7\TlvO378-v_Mns4wrq6ņӗ-s[Ly)9kl}YēEh~D٬Fo '[\ms.~|Т6v"Y9C}1dqc턷; B;`dhϝ:dA-4J'b J1k5d`7`kY넘cӲw?Ё{ե{s۩ln[zL+?K`ؙ'.-CR ~R*ޭ24G:5*{ApѰ\ H\̥UVUR> \SR_\RU^ \r̺WEJazpedeɂ]͔%fPU妭ߙ;! 
pO*\[.xƣiI~j?Z<>2F3ϟ>|hpIN_M0ѯO i'RI_9ǫ0p;{#[څ%4Yd0\6|J j(%N!T>?˕?ˡe9;G14\o -3{R^&@+/7)7s>"oPLkvIpUbĵ] \iv[9 Ws&UUtTH+*\ZpU=HkUኤ=W[a ,U.EEJ=>"\/ H`./抾H7IJɰwWjǩWPV^#WgĕgOZu&cp?)8`'JWC ^\*ڋ+N܊uBpJb\\珆'{G\[dG"X%!p$|ʒO7;Ǒ4;keԶW43HVznׅ@{OW@Ɇt 9FDWp\]/ZvNW]=#R'%v lc$」Kvhwo/mc~y~noc@wݾU<6bWGcfnh 'IWWke|QcB Y#qDFW\L\eqխ\fmQ!|xD왹iT CdMĺYc7+\^q~STk[O;9٩ r*Tr5bg*`h9iX rrR@W> N:9aY] -U'EW헾t5Pʁ#]A,i[p/Z{(as+!.8,hFBW@UϑK/jzFpu1sWA`]uj3*gv +cCW.-f@fj=o=͇v菧tZy"uJ EW~zkܚ@ rKAp/YV]_wvPzz?8ݟNqz۷qAͫ}z/99 nhD ~w﯎^>^m裆'?&^-[}L-t} mi3?޾u*c@wm}^ w|' .ޜ\Tr M8,?ұ=;]a7;=ﯩc+ю]v[d JL{q3ׯ~\kGr j--F 6wF-{jGY?6^]pp˱S/ S=j']=Cr>Z]L;CWSoUZJtJw;|F_1J@yf`o/2nǽ֐,H6 3FDae@ l-'3-]VvC}qtt5K+ae1t%drߦte^O EvV`GcNNO׻YowtU^xË7A1~{sߟ7?^imZ5"kn eo?>jtA@oǏtzz^UʧF6 Ƽoo_qtuyu~`EFÿoM,o\m*'W'nqlt!?50+>}MmڣȔ q](P篳yDU|b+^k*ۼyܣ>Uww̯e"3p@|}iymV -  ͑gPO7.vHe[w'>6+/dſ̱%ˡz#\MBf9w&7S}H %x+gwyW7,cƕ/Z}n~F=X-\NDŘH- ёcg9,`$ZT%vktPFSf ͙RqΤjP۬Xєs:]M'7k],TM 7)Z<ԝX sSr`̬Y=T'JY{ ubQ =5ZKQ#1Y2](-VM >2rN@! v**6(:jUo:wmZqY @rܪ@!Vg_GWd$8tyvT:C8քFnu/y ,q-YgeеlhŽbl{EKxٵP@˸sEQ7f=a=g3nEUh[% Z;wMBAqV*SO)^b :'؎~okҥ:"RX dlc!}Mh(pu 8) d> 8׀R %.CiZV2T7 < &Idp*zbN=3UFmTz ! 2nPư)0uJҜ`ePDdBEKViLsR)QC(f֜ A'"g#~C7!wL \LZJN u&zMp f<2C@?J ޛk(,@Qā.(ʨq`j ʳ,yGD),d~G[m ()gRNgbU&KCTݳ.RS+է Q2/gU˙WG( T&% p`l,@uG@H <|amhCk4-d3W r1/nnz1#.M5q̘u(No8*2҉FQ.`b=b~6VUXiUz{2?oFtkA#U\/k&ڦB&a-A7 /MQ>o9@G1+$_{HVp2:bQt( :#.g؃VsHVd:rkQ4XVѼ<0{KH`I}&H@ɹuxQ< 6tX:UBN5~T}ky E*NLcm0|Y+ 1Hk#dmq~zoQL P詌6#+tme,!z,%iQ@N!/f90(TFK]/9t 'pu51 jki*!DM !+ɘ+u y@'^AWHPhPS`yeV( @0b]GВσpXA"cR:f7c4XTЙ$ fjb2ŌKL pyP*ZxwG^lk`n*F2'$O%^]Ap_w%Ʃhm`Xu:_$CT֖YI`OߧpbvQ^p@CZ_[jvr!~w9JwHd幭Wo8X§0cϦlg_Q.0zcXͦt {zMNο5L/q6ǚnue6jv>k懻-3dEж)We1 qbL) -y~v(P!^yA\YXE|=+kx5+@\W"Nc\yp$ HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$"S ͱVV$}\!JGc\q|jIpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" Tp% 5 pն\JIpu+e!X% HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$zRx&*h5 jS _{Rpeg$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W$" HpE+\W U0v~`F/zb(n/ݩ<_ Zc5 }>qׇJHv ~{++둳\u-the QH WDW"4.j+D+Y Q*Itut^v 6Fڔ߉՗qsoLC g_=Mzs F77  jNSL:sÛ3פwdKk$;~վŲ`: E{uCb ](͛'bMj?.y%We^FzacS N ^{řimkbV\SZV3 ] WZ@랺s6 Jm8BA[\M23VߔA^]!]ǽz j-t+D)ҕupU]!`[zCF(C+^uU/ Rt剮:[!mEtBJ%Qҕ̭ {㪡+X-thOW\1Ήީ^k5vP' <Ėݞ}@BM^0tfe+Q6)yQ ɜxچQx?tJ@:>g/ңRKx:-S|ЮngRcr1)- Oxo WmM7s^G2WFQ'lGOٳe26*x ڜRQ#+vV'hw;>[S0_hdV&9'._G1)(C}rx~QJAFrTEtµBt( m<%]末) {ͫ+k\-th|<]sDWGHWKgj ( rY ]!ZNWR1#+PؚC uEWWTCWV"sό0•l#x"JM[~C/ ^>%0^0]WAhE0K,jU!C"Vj ZV ]ZX Q*+!5yWB2\5tpw}+@ɹ%:B++BB x_ϲС'ZxMwH5w/hׂi!B-NJVgWFI|52V9wA!zoJjn*+L=ag^Z)t PJeP2kzbռB%%]Y弮)DWCWZ*x Q 򮎑f +S ]!~[h]!JKtut嵔"V;ܑD]NWCO6ƿwu`+_^x0CiDWzs2Kr \k+DBg rTY?ӾBBWt(':BFW]I<&WTCWWWCW޻B^]#]YwKOZ 흐7.{5oW6z-4g02Lw3L`;# _^t$/3:?󽵱 tv::_8.&*F'lmHg߽׻w]g\TTNIhX ?I,*|?W5_o8.!iwWwY N~Cz竻|{ Tƣ?JȲ۾bg|1^eolз!Ƕ >ouCo3NE]Inmoy v7?lkJQ3T)"+uQX$.SD%MdU&ٰ9s[]+S' 0/M.t\1? Bt4Rꐤ%Vh[}iUM%!jr."F 5V| f/ e?ZI1Yh{l3ʬaQZ~6e]^L&]?{Q-c;{>ol[Mfyؙ87T}7i2l>,0#eBoK{~ex S )m[xEG ? ϟ^m}w{@h \ : c2&0+y8X/)U}*l:ln{T x=-N kT1A9R' JBCglz9Vwk,Q{2|w?)ek,&B -,Km/&?~*#\)>y~t zсRpBF9g<<꼝Mcysr_ |qn챛ܹҽCV"[Z Ƣ9J*3}Yk+BmDZ)!//W^vms[qgMc޼ݖ^ptηZkuCG#vС]ƽ(Vze|B.tb [1sԁ{)vF%od0./Mt'r/mK;Ƚp/)ީ mܺ╒eJg\j{'[XT* Ղک{iK  \t֖1'l=WpѲml3m ^k_egpv/Њ d1:nd$̮4={GqNQ/Gۇs77/u^fz ߿bY'ڭ] BOӇ@ty@GhX7?7ݾ (l?nVW9Qhe<{sY!ldW{ݕSQN0NԷ:6<ڻl~:i6/;*^}z.ďj5w+o4_ySCkϻ|FnPln?Ml*d B78.B;B%dhvZ/l,Q ُ/dעo! 
LX{nHv -Wu{mI0yNޭ 8D0+wcrӻq~ijA'[y՝ӜJ?wk!',L)|MIdZ?[EHs"ru#rF{6EfVA*[L(naL@ȉC4 Svpӻ1ti@'xœwnM !)yӻ r-Wu{m!isngZnM !)tNڻF-ڶ 5VzEKe&dԒu]oe j۹\Igt}x]˜tF>]V׳ը%PBv~ H {F- ҹ}]/H Ҩ%(F[wHohF-5f%sWcnf1kPWcjMZFƬQWc>Ƭ՘AIj\DWcj̍Zf{8k'v51M0k]#ƻsWcn1s\} JWcj XḺ]17k J՘9]17i E=`sWcni>fNj]AK'~8Nad݄32sx8=Mqֿ>I5C*ige)84V$ 1.tCyxx}Yv/X2,>+g DB~ZיxNa# C"ʹ:d4J)$as?@s9i _ `Tđ.^Vw_Gn)&i/z:4꧋OrF eG9-.ݽYy7Cw7ffEr5k(o%HP xAjPh&0MpЍsB P!B~ƐD|ROm+H|}LZwxrw] +,pR}Yqbb۷O:2<.jhϗ^:W8_޻xlK99X ZԲqx8@2*d Ja5VdX& eY"!,&f 9T!WaY 0#T cLXF9s#"QIC,Q8S}U (ΘDB!p$28节riAYʤ 3P&#^x4/BzBY9u\_Ȟ!['$q!@% G80JZʒ#ePBuVgBt`K㙋X)q+:lI0^G<1"mZW!ATH"֓g"Sĕ#c$@]ǭ1iM-hs ȵX<{zRG*mx JK_&xt[&O.z;(4GpiKUCUz=eji~710r_qC(7~顇(j'f6v1BFiC~22B~I.WkP^/CfG}D~Bx^li?on"B߹1}wK}>瓀OVb zvq{'UN]T+5n\1z5lWhv 9c0 XGwQ4j V)o`r\Z- %9 gcrK]e~F!|6)d~;.fr^83*lXϊa/NaQ\GaCf1n QS#ư2 !W@ACBa 8M]H߱!8ѤN \G Bce KM!.0"Kjj y rMyC0 2|)T|WL?Uot+ב՜^J5]*exּur-_,X4-&aUYaJ>*I^ԩt:I9:}j§bg01GGϣDB k +TA&PO`R'FB~_;i"2zA9 kmB !D!ڨ(^ja% !NoH0K Uۊ83w^Yhx?ofZ|'+"iě5ӂ=,uf^~}|Q7{ Lka=n|X0XŬ B՗,}thZIf:V Nj:xHv@7nquu\=ݟjKlTW첎 z!)ġ!V#MţAI,HY:vq΃3Ԁ3Ly 2Iʵ1RosQi 6ՁiOe~qΖ3Dmm,8CSChvqNtڂ8f7Ug1Rَw[/Wa-BO:5ժvRQ0QbCkQ!N!ef0珪kp!ԕ[TWi|؛{_i/ޏ/?9φs/8y4/ Z,%ͫYdxr q}gzɭ_d%zHtolmdM`VmȖG{g1}Kn-J; m yx8a'It W (5 ڇ2BYohfc Pv!/|wwѯyL3 Ѽ~ޠ9w_{ Vk7kTd C%`5qг: \,}9E7{|p@m}zqS>UZ%yw>x- ?fE?pp[Yˏϟ~cVsȫh>-C͈!zw}/l"U!wnWsgS7UןA凴2g,j!PQVY=}<u_u IeYJ朵QmDӋW P]d*u% (%K y2P& @ ݅2y5FBhj PZ3&I%a=54ccbql48iBj)k$"Tm7iŹ k>G6ʎabSLC{$/ =.E \0-d@~Hv@ڦéz.I*wq}OQg geUG΀AFˋ#'<MoFkx"Ꜧ"a1~F'C,*Ep13ڠB[X pKtbM619dӽOLHqW~WIמ>:xͭ7tO~ :h)+0.cޑbO=ktE\AE"+/E+[߰p֑NQ1{x NGSO %K=1HbjqTiJ^ދ$894QC0*cFD2{Nڸ /[ wk.k}C@7-I L)Ya_FH a[$Zn?y?6=t !ae{-Xbik=4luB>D7|X_ƜR|pe TA e}bpeS;Cq,;Rj.q2A h" jwSe:.ъRlY|o-ct?pBRS9}f?EO |hw?7.z_9aZK_qOmnw{id\뮲*Fy8Z)Rq?,>~r-lwtp?f& 2/ҚFJH4zfp  (vuW_}\ISz VwLfo׉ഴvx 3"\&NK_F Wkzo}WZi sWӎ:e8c8s_- p[|c8Zost5Ӱ۵URB%aQKV(ĎatLs=8ߝpe& l.} nQ{&d}m e#H 9d-kD^?Gi1uYRJQ vU*zR=Md#sƽP*鯫믇|2+,לp m%(rK^2b;!!K%Ni6U,\.Fs\6=[;j(=zGhHQzehyk\ƃh&|@7RpJ FSIAn㗤 ,ІZQCjȞ+ -Qf'R&%hrնNGBL6;ȟ0i[>3gffG=cMEN8@@)%R(ʵDA Fp t"N; d݄ܖAt96F2rQf'& >bMדfSu j jbHH$3gE SJ$VAKAZ҇m ѓ^Th`2i wM@ .S+C1E[ͩ19_:-c h-iFU&r#6V1L5 *@Б~N3Oh8SlOS^cTf 2uSc>N;%٫7x!A=[Ub߽Y} hjXe*zSTdFu<1%j qy7>[x sIJ1.RwUq.އR7 $O&/V\T[4ݿL\׉o~-M%ƍ+ڕw].-?a2?oB: + PeV EIȵZ~N;S_LRܴ;v@r_*1KXP8l`Ua]牲wO@SϨ"*9-Ȟ Y<9tUeл/ɳ]_V3MkyJө-___=mƒ]+Rʉ(P_<8.<VS,A>{n }tD%) "w^J-t2R{@*O5_&og!^6apĮg!}0n~٤%4ßWV e,;ns%8BI) ؚRރaށ)E-FPD9"_UgBM*krXe8QvG$emymJ\?e(BӠ~rk[ՍDbvq~~%g-sA rYɽ&: O^RA Zjt﹚ڇjfT Բ!ORWd48=hCERQ[%# /l6˲|s5濵@՝[o5F!`WiDF9.2%eKBmPEDwface( ml'C]V=e[j%Ǯ."zF޵>7n#E/.ep3eo2KR. ز#ɓ̦_ԃ$ meRآƯݍF!ΜMa9Co\\~KC]nY=Lj.M%.BKғg-_:A1{\N0k't]{!hDZC_k{r/! xH@/x`sF~IB}fRl~9c:z)V(nxuúy8OŻ?>жVZ )p{I]$ z{oc ["tm빗;휏;h腑>i˲^ˬ<gRwZb*)=(U!J;Na⾗i:g\0O]XolZd⦹k26וSoz?OA0$T0 kVP"B¢K/AqnQ?4yl<ܛؠ)kPѬ)p5Uیl"x+T@:X^Ν4-,Mw ܓϜZ=ydKC#HcBu. Љ S8i&fJxb*ɇAI{MY27TP;! N0{~`-bbPe,'m852[c'Eo7A+bZJ@);3DnB|NWT7#B L3N2Xrq ʀCiJi0+08 ^.zkPRA tZwWx>)刡_MWUa9骛D]些 yc"b*ړNo. p ~u lo-x|k` \%̈́+L_Mh,83hHi) z(n!\[s?rwesN,Z~4Y}BzUOʢUCE"SSr8[le2D%֝ePկj3vW>I@u{I{>AP7[fZEP}Au\DG0 K1i,?A[`"jkSw]<Ւp4v~p!xpy}'W^'HoќՌכ:So7}%zOwv{ rBp'ZwI6.-t^OkCZ}*1בxlUή29992v {7ԃ@h74}QPa%^b!iX1DS!9?ƚX%Ʀ+qPT`%+vd}]r=Ʈr;YROc*|%!C$A3\[~~s ނu}?<{xsYDDnA^. v|x>rCΈ$oEj5T PˋmK%ǭ0EiiElvHJc~_Gpnc}~h9?P~+T%֑xSl)Eq.?)KJ`:ʄ"Ɲy b!Rb=Xf$p)CɇRO^paw˫C/nJXwS6MrkWN`&ŢZg޵;tG-mV5. 
w;+vy&EaWXkM[,TX3j%™"ـTD`K8ФĘBE POO]g|:OB vDT/Fs` ;86J,D&&ұ4Qimbmgs@@5=Z$}:/N+~Ͽո=x\ q^j*vfaK,,q=a=n:~xU'A;'$N"6}^Qx$kWgw ֹ?a9QB^K徚-\Q..1 .UC>>c g!}!˦Ŝhx>Ngy}d'* Q|{R.DκEO7b*w2䔩 7i77,[2̚*S O/XԦ3S^?USxu#krK-0~f0^<9wHZ#ͯ[@ƹN% pxNh{0F<1 ]1pW|W ^gKBZ#ᶯB |Л-]@A;13LI**&9Jm0֩u'Yaʒ~I"Gyn| ݳh,ǁux)㥦k4֚iS$X3K)E*q7H$dVfR2i5z&_)A.*N^\U&|bbm8ubh v;l݄r )NFokrr- }G֣<3S5!!\Ddӓ巴p-/ |G-Lxw-tڭ y"Z))Sx~8LJxN^ 5C.. zݳO4|.bs\vl)1 P'f-Lba'!e˔E#ef>]^b"Z]}lq|;e_@4gNgQro s뷒[#ܸX4 #;cP( *krv|\\1\S[#i)*=̨!֓Ok|hl:~Q*#JK2$kRq>EUf̨nQHĨ.D|;R:B-4 HSVw+eP뤉)Rm}wf%`**Z=o˭I3G}dnBO͔$d L(WL )$5,RDJn*e<%A%{:X gl"Ų=x<noQҜ4@҆ $K"c +!ßV{v:zv&S 0{Hc0@dmLNV&&M/nnmzKBŘ6`nHK7ǵEqno%D(/N,ɔʢ4Il6 ZO@LEĘ ~e뱵ϛڼ b. QzʭtwJKⳁNG˯<~YxLB"XW>yΕ݋xU.^Ov,̇F~,ϋŹ(hwcc>;Ns\R&N&&ղ?\dzJ|`/aQt4خּ(_,ص5򣯘/?nOjQXÛԖ!ʁvjx5s)uExaZ7!PmBX8"D!4IcDaCG` P/8หlKb#N4b@R<6*ɲ%Đ )3~[ك(mUMjIb͞w^՝s"ک "A_UI~R؈9b[WZ g-{ETR`]Q,֑FX%ЈlEQo`p? 38ص>m%{lé<4m:mX٬}6w麖V_5rg1w&!{\Կ(%o4;ɃOݗm#_T6|«Q!P ![:m价&oTuqMۍ d;~V9rJT*H*`wKqx5JsmR mVKY(c`JY@K)mAvχ-#ySޒ[2z] bm ^?7h0ZJю}>>>?gÚbvr|I|R]"[TV!s2CiLk('VxO1.V{D'Jꃠ %3i + Cjc0("McJPKaT {# Sb0P"b I ޵6v#beH*dHg"dfe׶r&-ʷcY|$opSªbK UR`5)&ġ$ xfNǪ©-FLFAԮzJˑE1# [ {\itAOv2uA+8I9}cį޻d 9+7Oys4!o7n:h}OpOgrσ 1/fBXw)Lz;3qawNDo.M{QDv0f$d<_9>ƇFO`pe{BVlŁ3{ctg<+`<{)1-aAcAcAchȔ^{]mًw(n!s`*(NMpTTr;Qؗ/Hu3Mc<_wQ %"Y?Tt9puZ\:K *%*YEbV@]A. Eq)`UjxuȒrHlfhh9R*M ^9-ˣ _I0_ʑZjQ `\L'N1, w O=)3X}9k̅w.!mydV΅#m6ҡ䪈Dt O݄q\.΂ \d+'HH(P[EkѬ}5gnD𙽙0IL&BXXP..לkFiٞ[nܘ˓QN{jrzL*LNeҞO(/L6:w) ױD[&")K$"^ݝL]3@ ^y'xp)nm\se`h>Q-+aL5ݺf/J<jcJ-vNA;?Xf<Յ[ ڑWP~;}s˻q{aCÆGVFzpW\O?ȸ-T/P%<.jRz.*SB&ٶ1z$Gg0 fT$S/#AL! J Q:xTv(튑Қ **-ƢEa%P-ؗ)hf2jl:SdsJ)Dy6lڈGe5˯ոo>FC7Ffx\k((#[CU鐡D YLoEĦe2L|560Mf 0Ot#KZWZ򤏟Zs>K9CycT=z8?%۲pqnꔢ 7SԸm ֋hӄ_^|-! %$q,_V𧻓PgQ hC/ߞ9̹\fgǭB&dĪ!5֬%\ *=ָb~.{s!׫6Ri;g]7n~.5\&5.>OO8L#Ȇl$(=.q!@b'UBLjd^,8yMH!Ը~Pdh|g_Ou52#رx{kGXGc84?xvCvsӷ~K&䳧M켛ME\\ԝ.]7w-~һYr5mFԴLqۭF5ؗ[F24e8hp0Tn}e Zg-ikBvu)r d %*<֛wQǾ|A::OZKeS[ YL:#f j1Iecm!Q k)i%Q]$j4~>Vݹo|ا^/K?=t~Qn-j\\YDk`Fe9=b%,\*:tQT8vHkV6lCQ)T -j29=iOvxaӎphΕU@Z~rQD,s9%%tJ"-ppX : 6PAP1_PJFOML??@' V% : /]<ɧWLԗZ=5rBo08Y_jr~J>SV&Wn,ӽǗN.>2?]}R$_^4ˤuGyp9-*1]2p5RMoUVOv y/!G?WQpxp6迭8sCӖħN_Z>4u#Hvg.mxxxGߋ ͼ$i:#-|vL 9ƓG>02B7 >ǘHZ* 'Iděl!{:(7~1NXi=Vߟ|<>Ot1whR/SӍ-yKt;],1 Vyʊk~7+B0X݈A&M?cKR9C,AZQ[jkX GTVBL6fVAs**?uFEuKkٳe,W[Uj?Ae $^YkԺWWM@ 5%W*GK10 gծځ_(dwY(CO7(s>k(/LZi[N(A_La L s0֜HwSM;I؁5-*Vv.{N|;%N Vͦvp49װ"=q[T67 2ŌXJY\AAnWi^́E(%D -vm<?ĮTL 9Nu"QF% Ň/qG8zӖG%x_>Nj5wNiz򼩆;(V96hZScDG 7sK  ]'ɮVN~Dc,O>Vi֙mgF􁰲~/~CsgR}ѓ֑8؈Eېs<qܞvȟ z!vgV+қa6b=o)م2&r經;#0`s6yG\7ӟ;hˤWAh:,:==*y䔴((jQ"˚tDe0kryPIя3vPoD;qnbVHOZۍ4?hZд`b7PCaĥJR5 lE8ld]6Eƾ|A>|PvF[9o zf1EKh:5a] *xbģR2S1MEE d%{c!{yGcԊiKLTh>嘜L`\ecvfwQgi;!={t|M~=[^V02^Z^hc!hzsŊW>SftoDbE)B#` ~Ga]mo9+FX)d_$m0{3f!hǑ}a-n$-vdG@+&bXŪTh8u<| *-sl\U&R8 |jD)F*"`,!f q*jk%\T E(AejޫMLyrAvU|U d$Iyuzپ9i,IϮg.?yTс]ͧ= GX9u|$pr{Gp Gk3~<5zorrrr֔fcBc4Th+m9e PZ9DWVjް@6~ 촅ҁJoIf:\FKZ/m$1}Η~?Z`hVH﹪ 0gmM.(3ih[mYxixU"RQWՙu`^;s+SMFBI~neJW\h*j-=Ou#l$G C%].)T}d-vb0R͓tEEm5^Z`SF06b$O7<2;uP3o""9c]q'ũ Tj)ܕO82'/ת_Em]הL:~~Lw~L!PFW \oN4kѨmQOM_/xۺ2 ]כM5|W|xe`2܁dD=֗2LwگY g!4V颼GfMJS--@kՋ`JGqsX'sVt8S2(6I9dc[?X6e9e#i&)u~tM/ALjxiɆ֔䍓=Ϸ2:K2:K2:K2:kh,+b+&1jHe%*`AFu[ W gSI0֖Htٞҁ<ϥ#>򩠰OJG?E7~\9R7|0K% X%ZE}@KӨI=(Nsg"tNQV%ܔ )(6+]m i~ڹԻL*Ķ$An"yG1JZlOd% &7ߦw#NEX*lYfX8%-/T6w*^03Y||鷱IPQE/$OZ# P PX+I|۽Tm۶dU3=DOZse3H-X(B܄y'I71A0Ѵ}q'tvOBuBs܂|l:^bю1^t\D[ɷ"?` ?'˃ݖ U7ހXLe='y0RP{D|8WЇ !T\?=߇y̺ p鉣΀ckXB׊Jx Fѡr92=e)j͔УZD.pj5%Cy"Bz?#&at%<݅I/:ԎlWq2gqޗ~#"mI?ίW'U+^'o^.⾢CM[t!9~rOC4+/8VL:U#MpJUX1u%*I/hbwI'Y\Oے@ӡPE2f SƄҲXOp8}z#-U=f`0XD'GP GOP$7W..;3zՈnj5/]1~4V=ӳ͡go>wPOg7y~27nn[}Nmzt.ncC_rkm;y~nt.'0ꕢyb39 t8[TҠ|J`W?d$ipF<b9 ؈Á`/]0; Μ"e8_ H?}aⵍhϒ⋿u`YK"awuqřg TH\/erWg?DqEg- c)h6H[7)ղ.BKKc:UJ|Үr0guUG͹&:?ު_iGľ 
RtAo݅?xtjxnf3@]oJ}H~u(/zYn6'F+W$"jd9}zS1rڭ)v;y*hڭavCBq-FݱZM5+Pb#:ctnWCN,o@} kJ`C lHepI e%c^~z;"3~$ ~>{y3kbXQKNўzϠHѓ܌Ss?=js}=\dD&K*,%,%Y? [Xh@thߗ)bJ#lzC{"/~kr9™{dꊚFX)~ܜ/劯IKEa#-PZ,U]ȱAA'U0DyRMcvcˢ`7扂[*߬KOmb=꙱W~ lUlUS" A:`!cY>`N|lɉcO1>@)N9a>HR:PQG(gTjmBk,V+C:. sDn=[.(QI-hIC0jGQ#F9d%\3V!~'a ZM,Ge_b$Ct);ZO]/EL48o+PL%hyUG8WVJ*m_hpjTJ/ԞEvNE-U]xI.(>& Atק]T,F_q*!Fy3-v{w#흼T=Qwj$UWg^ DymqH@#z2.$ N[l&EƑH)k-5)R(əLSP5UX"dٸz%1!dUG9! *>&OSod(F2-st .1K4D"/ws1b!h1W( ZqK缻Ph8F(Q#UR+.g+e=zܞjuapjjGgiT G#[vi}GE=y,<@fQHՀQ^r`Sc0xV[}]:L[.ܭiy?/җEvݎRWD\׳dfvp9 ]U RϷw bLh+'p h♉5(i;:x䠖0%-Mi,xx\ޜyoQnPpByFtC&>Cmp9ljxAY I W޶[tQ.S\|ڡm@V' E51 *|Ɏ1n{l1'珺$NH'nMͦ_Ϝ80U4G633sgtr*}l*/fEkl·YYYYSě0\UN+rܔTXA0NT <;t|rUz/>R޺ sйȁ7=7]y=bfr5\-d) iDOCd2^( BVD2PaE2A5J;C5 WGƻbBFo?DKevef敖Qf t~u1c?TuBPHƑP ٮB!!DсjfZY29=%S%R<180LP1Mxvh{RGUmov}_>_~ZWCMxV\ Eۈ0gIΟa{:[pw>%"c&J#Ť* ZRG7_h:$k+0(Uȹ" q@'#$0W~| {x^uhqk;y~nt.'0dD{/3]Yo#9+F1G`vkky8䃳 Ҟ<suos±ӗI]81;.WBNxځſbyOɕYssr7k xn%U7Jez;lLӜS/Az?՗QR|WS)h!;y*!wwJP j|hD%J̑+taÀMok% O*uj;V/gRBKQ~mg]Q|}=U j=5oL"󡘨J=?7Jj^Ga5al?pl?.Lva)`-LCEAG@ȁ;kas-{4jd3IRB5ghTSGwvk7X ٯ+MTbRt,;)bמ-S8NQɷK_9zתer3ƮRN}Nԭ- m r>8䃳hO*C&j3a5B[FLi%vo ΢Q<%޾Ź[x]knzIvnk!E_gy1Yiwb/|ROj{/bnbj$ $jAPO]&C:-5Oem%}J.o/pjDM}~/[u/bެұu*r(HDBW iCAe]L(ֻA!XZoV^="'XZ$^3d;6yJ>I96mTWPف"-Q#e nH wbGP07L=jvԺJ_rni5& FЧϟHn]o@f8NÔj7ጕٷ1&-1g3ʍ6Ji^q$/el׉ybCQK__UA$\],i݀.V%=Lh Q[bIN%Vp'ٱԸh.OapB2~9W__5}y߿-oB" 35 ( )0A m#(t% 'Xc#zc7 CsT s5u =`u!Yi]' &N7W3_>4fq-5) 5Ta ?X=Dx(N`5Ʃ"ɭבV i3/Ӡ+-V[tihs :֝4jjɂOi7A}m.oomUȽyٿ<9pM4K+nDwR7wS;Т% $ * 1B4%Gޱ]<%볅l޸*1)J F[ځ)bײ(uSXc q뿅N 5jfp"Uo}>SS cE/aj0`V!-F7,>5rBgSJ9_Ђ DeыMZ4!c|1C|ʸ$gӫ8WrS%M?&²UZ]Ne >MΥo*HL9ki[bi(FWvV(*5 ONG-nykv㵜Z( v'Nڻ}^MǞ84ndv)/yIYn/XvzXn[I9F<.GNj"|b>] pI`~UTD%y>2\ARUUƞ]UTiRih-gۙN`"8$ii/&S$`21Akd=g RJĔ(PoxhN `֜Q>ЙuLx%LxY$9`w:y.F1UՓ~$>i#R.u3yE ZzhGY-1. 2$NKyi/kͫ4oQlBf|߅XxKx '\Nzjo3q1gR F9OkRɝIR|Y:D}w'xrP /tfKINZ$q$9;OQEV3v5PX`rY]P{&|#KSjpbjVjdKWw(⅑%P[cKeMl57j!QaB׸(){G! 5˪A$%- (؏(nB%Bтg ' [ >g}-LY֤Y16T0^o`D‘=X-h dh nXcwu-}vpXܤiI\Ǽ[K,%nUgy\*7 jODy^O_ Hk˴^X;O\?o?Z1qlqCy5>a֋ߧd9ism҉B79 ).ϙ%(Irvi!]FF]?W?{mj3_ϾǃA7Zi 3ٌ6RA2%v,6%d|MaNe2uVZ3(Yx+J9XΐN^z#/fj鐑ѻi9 pJ) & i$벪 Xc˩SOi$ Y}i޺a@ iA M&'zj`+qޯج[8o^KA,lo! %%+0l8E ji%*x Ġk,W>.6l+X Ed(a[`-|}6q~ a0ߠatRh"j =*H@%Ohjx<[or*n0b@a˹(ZK 1, AJ*Q¼Yc |p$p{UZ|Fdwg $NQ9ETFĩoAih^(iF~D_wPS3M6"Zj?qk.S{E+Цp2u&/Vzre4rY~"B+3%nG/gԐQKQs(\(,306t Wq(S>|g10 fhc%bU O3 7\t܁5{5mE 3(@T]xھQc;5k ~ukͭ(v݄S7a裛%${MxU2L&}`n±j~ZggpT&rZ3ho DSJ]25'y IWy $ XIBMwddU%11wXY ָ;%SDż/dxA O*E^ІuH.ϥ~D5dZ\~\e'K=8 c7Q9XM٧+ĚZWܚS9VQ4]"h jB2%0x0CRrb>0uM Q0Tr83T)uYcn[dVaylC kk'Cv+ a!v劅[;D+ZiS>8FyqgY* w33QfyR2oke$,_ϾqgcCM6zSNyDKl q9P< #^TХU%=Lh#嘼Nt$|gmc'W&g$|4$Hb) B 7Ø65:m:.&Myfq8&Q-YɅv\wF_%TB:ݥ/;Cex\8Xz11`y :IMyzYŖbsX\1\I>0Y  _)_a#*1|~J g*[K·W$0{V#$ hcQhuNy~zf!\iE9g'\Ͽv JTĥ&fd3R Eb,G.{'Aܬ@I|$9Qz^} %4[?\+[:/RkJW{?q$EkC**N"~=+ V2]d ]6h<3H|;WW]Ǥava}PF L~ I2kI- Glj H *V>Fe8;Fܜ^F|χ W3{1Ym/| >"fWu@Ôbm0sfng1XlINZDb/ ­c 06Ma\ 1R-Xz9;>'Ԋ<괒MjnCȌ.-X q֠!6[Ph_U!pZ/Sjr6(QLH6RZi (}iP-SN&> /; =;1g>YKč$ ]T[roIz~# Z$] R+v߽2翄cW 2]ud=y}=u: v^oyM7 ^ }yysx|UjgAp=펮Gcw~nVF?|>mm=5ϛ?i R)CO~螙tzza\[+- RketOWLI'>_%X .f~zn/.īifb1s3nm { ']|?ӊ.ζa03!R:nrudtGH8V목3ֿN~tТ;(_3ЮzT[[0ϛG#藩gwy NM#m)IUodaf=$#4 b9(@!9|bxnn=ryLp GcBط]nL-?PSmjWxYK\Ck_eK&1CI (9xMPL&4plܗ<,T xSd2[17o"K/;ŷyv?ɠ]0N0k>>>)TRWW{Z~)z{ h&]͓&?zVLO/O{C|ُ11Ωn:ۍ{*~Ho FHѵYj|2 HKxD!#iX (&!h$Fy%'%D  5,FQ.Se^)XdPe#gaE-o).U&'C2Ab,E6KmS)!"ƸhR3[#(A? 
PZ;+RY*RYTF9+.QK\Xz !p 8&^x !a[F^1$Fr{a!{ 1ʀA 1 έDZyb-J뉿)2/kYGqi(4` ZҤJu!^e} 3pkIւ j/Z}s8rp3!Y @шz" (YT Sypu<:1cz<Љ |LyRQ,RQXԧ-SgW`Tx=]4C ㅌyM~q!7V:̼aL IX愀+ °%BXXBLGZI:T]~``@s]3'R)S#l<x݅ F(o@ǘzPӂ񚄺ΈNLT@Q' !٪RDV%;RZC!Ռmb@#ضw:` $QZʕR=-|^̆%[:SBop.Ӯ}dKeg?ظMUtV2xa| v2VuTj!TfV Ewp#.@AGg)M(ʏkAl(3b4YwBo],XK_dBB(< @x.3SJ[ꘖBA|T1 q)E IoS"^:E})̑̂.E(FHWwyn5o[J}/ь|;Y&;պfS|~p"Ϣ6cڇ,90_z raY`Zr<|i%#_^tķܯN)(W۪e?ӝC&_t 36/ټ%<`6L[ssd P^ɃzR!("'dH>',l`/QChE*zBqSo-?uU:5;<q!g~ ϫ=y*pdo\n8r1Ac]x,^>yaۚړ s>LYR$_g5 g$`tMaZIJV2$~,IJrIʟZ\wumzww]UH$d ?-9XrdVS-S#yД56!5dW:dHQ%*\GxLQ4|kܱ;={\{#ց!0{kMgsCrc"\k[![6=oe@& };mfy;$ogy;Yv'oI FPګn" *N89ve:|Au;;: Q FZy4Vkuz8vP:'zAj:&+8ؤS*p`)(MU~>lcAP{m2,ngqwg'n,ngq{v/0V*Rwv_-&2̺Ô >Ά`Wxw y۩g:/[k#p՛͋Mvމ Rŵ*꫘S[etA 2X KjmgԵF_RNcYQ]rƝvCq+m,N\[v_\~JRz0Ut(;<}_߆^)47bltǨ%&Xvۯ^w t-|+n!;++/eJ? ] B$)S1*gBm Cd4*s1)$E<)>Vj?$OdY1)YvViOӮ =\ӿz٫jRUVvhaf_PՎᎭׯj7pG3ۛ:dT_N?+lصiz8" Iϵg.dCZ[ZV:*JaAkc)VT =ؤKrJ Pj2vI?,ogy;YޣPpH|w{mQ!09;(%I VGp&{V UK, Z`T{q??- 6On ] r YOCX0o\ /-E![_:B 95p|(9X"1.ITT*Ϭ%ĨKl*[֛* 'mTJnΡ@,FsA䟨<.KuY#,d=[śr͉ug.˵]嵣Gw+ä!NG$C϶/_Z< *Ff+iP<4:TOJl $2X|AH>WT-܈cT}~bUuO*4'zzpbT5'|0X)ξ:{@Cޛ wZ-)B:;u !rWe" -!)'s%\,޷VPHFh(J`ŭ@ &|P`SD?yYb&%'#D(V*;FujXU01AgU3 GoAG&P)A $뽖.TL6 3MLsyP[/n6YFQԭsu.\06]tu N24L9!^|-$CHQCR.|g͆b\|R8(>?$|K 0 a[pE~{уYC_́PPb35(~ūaBѕSoG'Ggw=mIq )GTA {r"jyÚ48vKa3/mLk¤m"I?Jw-b&O~}:c):WK@wAxVY CBgBvU d5 JJ̦V9O%F,Aᚫ'۪M(c ;e{AC? ~gj;Gզڽ?2# Hcǖ$wU" d G~,Gy!R"tX Sv$j+d c6C~2稢ͮÄ ΰa]E$l#Pֺ 1AK#xElOMrQm@Ȅa)>bk[ۉ L&0" B8$%q"8z=k8%n !}y7X\ٶ癢Ȁ7[Lztb)&qHmwBʹhqܾ<_ޑR˵$XB|fy$mE~Pm- kpۡ!)yDըR!+)դ[lhzL2b.zQrJԩ,EtB\ˆ$T CDA* SʛwWw'^[@K B: k$@OebxR{ȡoٻtޯ#ˤ  9a 2EOPDEhQH<)0g[s,Jɷ5[kxI-;@fbX³', Ȯ-Dvm!kz94.mܴB"{%Z cHɨ ˒%!lNF[WNin\V*qoKkoCFUoGP QkMFlL+6d 7{^)˘dG=W$QW 7ș>H^` $ep> QN& mIs9$Iv$G>8Rp%2(ar8~HK%N4XȐɈn[KV: $ gF>N46vs42QTH mlV1)IJV޻ J LrFj}?x휶f,&,HT G iyGt彟IjC|oec+JFLJvQƘl&ʚooJo.Z=AM&07mP F-PB3`Ga$n2۰dA⦶'Ro&/[E8ofnQ BPqlA=O#?ضFh҆ϻR2M[#24+smF)7 H 2Pޢ9^01 )z|m3yFAI׏bcym5vDSwx~yJ bg9ԶGttmBANj*ƴURVE)},۾,E"Jߨ 7//nb-~\Ύ^H0N)J*Eo*!9.-aŏ]h]BT x]\Π\0h 9u6J@1;9Jw&6m.,Ul4OmwJ@j`ZHUV4hx@ YTf8DL NX$6JHȸD?ڶND-&Ӹ+i"26d?}A4b 7؋XMc@iĭsJ'B=P aMѠ2m3X'0(=-v8^\^{wHޗ]|h˂l:lW)lՠ-QeQA1KyF`Phz4l6@K?IBhnqEVh'gc!b-w4Wnj/>{G-EL~WX$両ITxdm$I ~h=v홇@De߈YآaKHfŕqeD$׳’blKPHw0N/?ȗ,x &+3]v|\v~ٚLUf?j|zwzZ0O'&a.&+5yb`<N1\`k),x=hYw?9x5{9=ǚڠ`WOl&Ygf!ĠrĦ'+SX'gN`M^hH{ylEE9!\^fALBXlW3p .JF«C!iC8eFª{Eu3$\ǤaC ,`짘pf1Q,]\hwz7_|Ku.,^k VT҉AC~K)6ՙۆ 6qA#\eyp!K)zJֲ=\&rF\Gαy3 ,3rM 4R1*z,)O dHDά)XhLQ?ftܬx;?!E'Aw^6:+,IZ"2O8 މzuWwAPq=&bx۸yVS.Rwt V`FtI&xռ$X`>V8 PRVCè,T(.!|`HEbѓI,** e+,R" W/H>Ǐ,JxĠ9i$ALR^5QbJfK=q+eDPK5$JG*jJpVL bn#)[X煥wӨJ]JUU:cH.kćjCaLӶjQ^d+s`k$ZYRҋ" eN|xGr Ҭ;!@C[L"f-2y" 7GBV,j٢e*1eL IU:W($T0ʉcEdNhS)ig8y7#Qm)~Y\_(,`0u6gVec9Y&]= aM(ZȈ9ELF2I$"0PQ78Ih\;`CHM+xlSGj TNJݨ'oř 2aq"NRARӊAr~~6/ /BÉ18yL0'3f* k;.HN-Ղ;ϴTI@_SMr2YBv*Rr9|"4/4m)Bf>_U~g5ZؗoNU|"#AfByIh2<$q8%7Gm,'-' zrW9If8#Lbw@J=9׷X5 p3_~,4 .Z p3Al3-sS4Ơu\ p{t1MQn %8'spqb'^I%U.4=Ã+DM/`.@M4l([ :8W̃CT pNc|sZIq)F*Mj{h zt^[1J?N ß*b%a۫dSt+[ OꦁH FG&nբ`Ag.n6o7VIpKaЕYj,sV&b jR,)+k$ nM($s+=ue\wKJ#%[ŃRlU[wKB ƘA)IAi@ M 8Pfb{2&Rۂ2K+$6;V. 
JX[eoR(T e)k3>VS)@vk$b ǹj.oTZ UY[&zS|nբOUfD* jV-bD n# Bb+OIz)G=)&*=ɉ{Hc=yG%_ɘ"6F\w#Қ")RbG0DRnR%%n$JX0H(q8[kM-UbIxśb)j%֎hpXlݖ^x-XY#:)ЧEwlE$o@$~1؜b)V}|6qnV H8O0 f(m܎jupQմ mh xx#TR+T27ap2W;&]sFV2$FUXvL?Gn-ʧflJeYCJ{Xf*9#F¬ë)Xj{GUpܙK(m  c& ·~Ё|!L0֪zav4CT X{!b+:hv屚ñ1J\k5mt.|\#"̯p YBBC >,nwWkO]{L"9D|9WR0ެf '_k7}Ļhq3_MVNdo'x?VHe\ѿ鞬N&e>r+y/ y\lqN.i6 P 9o3MXȽr2=BF?'(Nȳ\z&'!"fch/ZňM_D_DNf֦x'13=k￾~P/;qlcMJ볩&VlJY^JU3JF\Ѷ.ѐ?GXm:SCIDIyRȰ<9;TY@q> @( %.L_&7hkH3v`'b~Kގ%QwG/ HK{?Y-![ezԇsCw"|Aw06Ybn.k,I#?vư\R1HrS&P!+Oq V_?q^f1TW1J||F@2;Mb TA|3./3uAjQqե=wYY1(`*;Z`.n>*ڟ?ի6U0>43TE|o:x)W߲0[ژs5τr%q9+] (v=>u)ʈGL#OLH$@-N$1l/?,nn0U+ڃyF SĄLXxw]HlpĉG+V~j4[ ӳxS%tg/?FPKFK!~Ϯ蓕r#@E DMءlRc%ҋ͎ jx& -Cc)P-o:OcM>&OZ3T|hj3bm8FTh- (.! wʭ_ k/>{z9_=^{zO'2@+ݏ|7qU=tLcEeFJoR^IιUC 1sR4'=x°IcN[p-XH0zf6ݍeoږYf4?~Rzߦ~mgئ~mg6rky0NN{j:` IqJ/1M)7S;Y͵:v}xk1ghEnsўe +] vxrx;P]_Tɇmx\CO# ӮW@O1[.*ts==uXqD~NcԉqR87sa)(_{Z i bɄƙ:LJR(^0?%YyLrş Cs`uxơGi'ş N+`ŕ$cNngVv'ff:{`k^[1(wBil:<9vh-n<6zBtݱj@2fѡ0Wjg{&ijk0=XQa^U'A؍(O 'wO7xm<7zUs SnFwK_]Wō8^Ԉ߅܌ /F'N8f7c&SJ6NqCvC>kBwx:y ,o 5eϾgzɍ_mފaArfۚ%${fgR֝r7[Vg]ْŪb].]kz2㨆 5OC*cƀXJ/lDݥDB J&^EﲀeiNqrO(aKa08D-Y5Q :*t0T;dqޭAǧG01ћӬ{iƉiIqqze@QV׈PYEUWC:plG ѵ6+qAq S1`]N?|Tf$,];1tRNE@IR¢o ÎSUiI;%&M۔[J@zJfxC*W'ӅQXT)NR`i¬DK%lSY(,m _swU^ I$uBd@,<ď=k)sUɒި\F7?XΩTعBT2"Gk)8rxl?] *MǠJi )c4N2)6|:RagQ(G;;le- =pǚ}ݙq$slnQ,w$5WqHYgI@XNw+ o,wVNPeMHչ6RUwa=`s)q3.㶺.%F8nF[pUQ&#V]zu\|k@Bd˕Qr"MJR. C YfRifYA0*vz8dBb#(t:O 0g E 6JEB&i}% )`h&&Zi@>=!r!4ܝs vivpuTYBTB!XҚ J 4IfRhK`*U*举Z OtŵT,F->L|/EyљdI Fxw)*gQjH2vʨ8$'0T=~{pILSq"s4ILQYc_G`,s?ObiGVaoB’maKJF&6- ~f=@K'D[%'W-atAl h O@52:rv(Ԃ@rPF5%wvqnyQ^$Rs9<7R" &X\39AE.s Шۋq(U\\Op!e"^EsOܰ z2vF~qG2#U sbv93/<9U`9׺ sp39. tEɀO J%P)ݻH)U3Ԕ˜ saQ|˕l^-b"[xɀ+K11f,W4H=VK|q,I16+_@PbYfF,βXXTv_!W5B"Tc)o8N7mDOBl6H"qӳh(io6}@ /q8%Y'Tc`#g^6LT{4vd-3!3Q ww zd9Z= xcZK%#_v x5Bj,u%@J5!>CpA"LT:$7 iC4gɜi3irj%0(ds;QA2d"FD9!$DǡTkcv/io&hè8(IIH N_<K;.A8"C>a1iNV$hTCqسQ3+dgVkqa*<+V DurhY?C,s ͉Zg*SRq) %fbrޖm`In'A_ i$5n78y1+wBd)D <14,w6:Tm*)e%85!+4[gë--Qלc̫{GCB[?ɫʿlq܉Vw\dRư{2Z=|*C;SHk4yh[xg rJl$j'n&[ "T8U`qUyqT@hU!D [oB~rxVl`de2ZVS Nc5b}刕&bKZYrIxKHE4j&+iӧS/c2}:KdLDBB> I(PۻR&{XllRHm_q2=x;ƅd72wtcƤN(P8 L d+ BUzE DŽ뺃] jC utJ隈`h&Yji* ˥JTjY(M5bDKۿ~ȃC6Bge?AziRƋپNNMoTۣLp)H9-ZĸjI-ILxb۩bTX>0݁Fmfln6de9,Ha[w^Un:nr}os"K<:59!HѾ !xQzH5Vo4V Q{hrC͡QnE9mF};VXƛl)yf b:h]ZQrtyE6%K"ڃ8B@Z } i)qٸc\i0̙<}Mq16_h[ԛm4Od7vcdjsǕw~{q福r0E2;ì?|虷ym ~ӗI!s͋SYVsBtfoh ~O{!W^g@u ݷcC}*.[JpRZ#ms k1I1qnPf )Cΰ"9`-P)-E1aU30"z{޻^/^EB!+r2n[XngܶOr-:%Hd-h7;B4WP("`΄U9"iN r0#M"r*2J/,B U%:*oY_ qig@m6o ꜮV.*1yU-Fa5jl z:"rFa䃰;GRݖ8?'Wv`uuJ켏UCZ\h+;l!HB;$YqLj+q R8ߘ D ጪȩ xGngTHT8t65f{g⳼9r4Nɗ7 aym`Kܜ`]i)x f&٤ B"G)8H{ ҹX^1!R E$c wL@O:QV˄;IsS dr2L.:ڿFkxB1m]{um& Z daO(};f5ܺvj~/$ ^:+Z|~pA籴ekb3svfBৢ. 
}5 r|v1 < "~dmi1:_aYF}L+EL#: ⍪=TM](q"guf6 ᣇ 4` AƬP|BHfXُwFǢtpjv2B7Oyt0],MrCS95v߮wW; v2pXp֔»I#k#7f__jB^o{% *a}q"3L-'|F|HMa2 oCC~,\-!IK3ReNU#*- AhbpSdrLv]_w&jj{5ck0kHR<gKr=$u gJJ{cK{n4figAi_g#  y0a+fԦ̡/3xV\ΡjO`?2{W `ADYEAHn*_TT^@BӴqy-w_ )M\RNUJ$72U)RB \٥R~Ej ~)Pڹa0zm.2KoXb㻫Oz[%OXQ3;1g }`췩g?Keܱ6I8YM*&Sӧ/eC`8 1-h?-ys|·|=9*N{͟{7ӧ;s& /zo{7f7oO3O΁q\?af{7K]Ο{B[Nz+o5ppl_~;ziy.{@Qnj?q~]4~ 6ɟ _C߿uZGuʵcBmo{n(Z@ft2Au \^׃|R^ ?N+H2-7o`uВ=@xno=4v烶(RDDǣ#d KqDX 8T4V6_O@[ =x.y) }3qȿdZjn (F_Pv)C=0)8CeREpnIޙ18SŒW/TAِ:bxC6oC6oC6oC]ݐ ې9ps FvZ"IHp##JsNS{ *8~-W㕌 kJi_c缢Ϩ\{'?Ç!h 1ޘbU3B9/3 I =҃ƫϽ0d7 0f7־mק~o33N/FϲgiPevO<%`*\t@]{X_?8EBbZVQ&U)5q,L=N#S;y&M,"űDkcV|c^z*M*4&ǥ\utAvNdi5c.D)CEGUڷGr #6E̗}DdnçB%H/;QY~@}T\& x6k "63q8=ff3vJ]?fJsP4T5Qs/co>* -3:$(yΧHTfW-Ig竉L/;8;]үwoiDn~45?Tʂh3_Q[6k?9#{%HiJB#ℌdjI$1NU &OɑDJy+3xj\iMIg8pz^!s𧧶s2T^څH8Y)z$%.?tf-žJ.vG86[^6p%v^77Ѕ"c' R-kVŃN;s]%Ӻ|G~"P m|Ȩ̀1)r@1ZZ@ I44cRN\y)3,)&IT _DyIk8C_&k)>\U o*WȦ2w7lj+=@<;4~ֿucO2@U635qQ[Kg%qaQDH *%%BNvu) %P/,?.o@ _̰{f\̔ԙ/ ;4Ϙ'/rf܁4ODd-p v8o>3K1aG~izJNR/gnHDJ1%<Ǝ&*Ae̱Ƥx?m |z%1M68Z) {q k;˯a{g4}LJ'}\>vsh39/ays UsՇ_AAtυ1J$eO%8MNZFeRrLKNTT/&!v%9 V7A}L1;[vwvPam׼6)g~"1]Sk@.˷_fļc3?bbQeXic0^~_.xs #Q\ & /v<F ugsC;RR\ ! * u^5;.ъUv\QV8Kе\atL'P]QńhjZ#ȹljOHLj.[z߃ Pm^NgND%}1N?@$/$[Wf8QGˏƥ[6s@v5\M=ߜo sX+hyP/9`v8dpCpa"x"TEZ_u~ 27*][HL5WuVMZ妒R_PbJigZᦲR+_Ttu=It `ƇǬ<9;QZjUuWs{ٸJzu<~,9~>ء(70'CXPEsU1#^?v't㏵ oٴڮ>ab(r|9]snB֦Vvݞc۵Aq[Rߪ嵽m0V/dQ*Y@E҅E+e' -ZpUbJTjC跀H QA2VZlBa5o: @杤 +qCE+`sIE)%iDRDp *Slb$WL2/X id,`4iXKtFnK_tLH #=@` K#( f `J<6тOc,CAL✶4e9$ڜt Э:| ][_zb6~8frU<#"BdG;w6m#Jw@:aag !3+ӭuK-K`FRȢŪ([uIUEKV UIϿ~{cW*㜿Y]kA~Jtq{-Iƹ{Jҙw[~%\[ww47}?S7Vk?⫿Td m1}tuUhQ[qz䖒jwKV}U#C!ep)7$ޡ]^ƈ4~glvi$ Qa^Y`kx%yO[PV6uиbl%׶]$wՇ`cG+@FFb(U'bk;X7a 8ƬAɾUS j  @;z $): \*9M(`Ih0+)a k/Iw%SC< =9̕<ڱ+ 69e0ERjV]tŔ%a.6,e8-3CL2`$)BNd-2Z)ȊgIbkzlnˣr g$Kq QsϝˉAQ2ť(VUyIL(9bƴ2' $Y‘1;R'KN44wgD:|ל;w%r[KXVα:s=HY?,w]Tɧ P+&';̎9M fy% 0;>쬘,UFoT/'gP}hjK>d,*= ^3Z*m#}PP#΂yfaj" nS[y=ՓB'uM![iTbjZP椌p`C{R6z>Ъ944T,6!JV)(H؅Hۅ@0&Ԃbqj yX.=d)oE $ѩ[[[zc4,?.&wOoH[x%l3pwoS~[.:^o+wSSV#ڡv#\V][-G܍S,/?ic޸y7ɒ[q޺^qW59ƹ7Led^>.8YX2ubsa-r3)T_y~e٤xZ?T76[7ifaT1I[VZĖ9cͫ-R[oCYeIV13i-j gfLOc6pPbۢ)h~ 0#)Y`sLI-vbguSl}Sy}ʕirEq)4%ݩر2k*m=1FfOS0fiؿoܮ>~}K*cLj{7Nuِ6,F+l`q$K}.ے6*R$AN3z-l) 5KP Hn4Cv܎_*$U-T3 :GHNN[Yɵ D,:Owܳ9"WHPY` pMSP7=-Gs,A`Н>Fq31 $TIlX6ĩycY)QV{,Syvs9.Nd3>f"JZOdg4g6nyޓCؓ8M"^ۥ:`grG!zu"& $f+n|^Jُ5;jwjYҒ4s%un盥Vn!rܩO'qP/ 2ԞMS&dDM%o"b<_صFkEsӋMֽBvl \]1PIb:FBP FG:[G"gYw~#ol@px+>,'%9ckcϳyz;N }>a[ڕ^1TŻjnQyw@KFQNQR []IhU[-2VK)g zFr֎wmRԙvK{"rNk<޶eR`bٷ>SX??ӻLk%)g4V&cEعv_F55Nwxϙzce6K&guu61Q`$іם `x6򌫳cH7zɠ`*Bq{C'"4B&tkG\y":Oz%&]_ތXvy|V~:߶wݧSyQG!v׷'˺!(mtH&Z䉕>rj?Kїb-d ۹ :2?Y<^qf6:N*$R_k>އw}!_l{§9="CUrFmk,pC(l5=Y1C7{ B+4!iQ"t<}W?}p丹_ukƟf`KYwV.*g4?yeTwJZ zvcONTT\= 1p< KR-l- .؈qYoR:( e &Y )e_*ZC sv.m; IGz<$V"-Kכ> A1Q?gףsw0SzC=ND"NQ%m(fCg݋IRI=̢/GΊf^9-~yXo',ᱍ2Ļ{<'#I,Ϲr1`?%.va?km_ߚ3rYdk5a$Ae8ZMn:EXՌz&twi=AIkw@{@w߾Vdjr ;1&4D\\7}kiuVk-]vv̪z|Vq]ɫo/oǀ)Ɗ7"_mTpssE==m&FT뵽C0h2V|"O⽞t^tI{t].yb,91[ySz QCp䑸+n[VC|BNj(–bga7rCQz9ޛχTIW;Pq[DD;R#ۊ&;uGJ9HV@XEyxcΤ5lo8DFyD5 }z@r>CS?N!h)Zf |ȇ1֎k$Am?|y[j#?+qGS(B).*1 P0z8UqoVjj %Cu*&fg +W'Ov"(e[; 9r{q!/W$x65F 0H{U1e5Q㆞4с>\yc]OpRdܨb ZOp^T#RA"ླ;x.LV#ceP wB(H.sq6egB:&CMgcXnrZSxͣjDsXȬDNt::Pou:x1'$L0ӁebY'k b3`hfY@v:y:8%̈́io"Acv7O^ϣ 1EWNaW]+r+⨫zbdNL6N T:J%|cjfg{Q,co~+/g#:sޭ?7 ; xxtKnlA:*6΀J9j?G襤R!Ad"ڀpgvw@ ǽlmvh o|vӅo )liA39_7.s=-r| ȇƴ5NN +;VHcpVw:Cb |,S؋wemH0;;Q?ާn0PU([Ѥ=1}@EC- /D"ypH\>軣"R4zbip a[*L ,LqHAQȂ{`0ې!w^$@q5G Q,TdB; B=>2b-jOyx,DТ3Eñpk]0hy8g`2Y3]27kfW ^*ފ BJxWZ':?\4ME]4MEԿh9M:SjW5UܔF7XWRv{RMKh(9Yps@IΩaxo (MVo0ξRx?vC}gv9|Yp5jl{m$^aJA~M#RsU!j*=<4Vd-@YbZ*MeHP4 I]6+6],Fٔ2nZ30׆J̥U&BQ|Iתr-H$(q;mN2)KKPmÈ4XV\RnKHqkR6ZXk؅Bs]&RfΤBtZaA5 
i,ILg^T* KIԧu(%znK #wq}']p g*/  9FB>XVE/!/AB%^/QF"bZ.#γߐv=]?UlIt=sZ0"xزa*.K1'fN%w炦7s07@İS  ǨN:I,;FV=9# uhejD,קܠG첰,c\+#&ak=tv %$mn!QW2/zuz=<.E˫{gn2w/;zs +{7v1G%ڟ+9zwj7sTP*FPά Agi pzuu  yy)$,b8cv`@6bܖ Zg)=?K,Y"?}{vQue%ųL%H8G<`/"%MNQGUHר (Tg49)C9㥇c{$Ne̿ vpaKFjPعyvwFTFL_*un#쬼3ɳEq=d8_m6u{chE\[a($ "43C4n ܂Av6.=9 !|ٟp<,Am0gjw]?AQ0 Ű鹙YureRn;' K+޺1I3ܔNO]xX>@#!욾K…:]K\(p&6u#5jCPz&dMKS|R g9Ջp=c ICʞM׃Dwc>m/peDN,C\>+{aGN3U;i$r,9?2LcwjIc6k n,΂F q쏑HhBJ0JeM1TH1Q# 6iE[$"&q-,]wt {<#(L싹\nC)>.l*.6{æn=l5aATɨ4 JliԂ±}@˒ctK(i^ dOݸ?OM*E|{6À~PD,?7LrfM Cl SWJ0?127JX(p?D +[ߋآ,Fl|6L$>Tuu׵&F*hUhU0"rC`2΃uΆoW$!ī=c #L{NF6WUɁ!Fcec jLH 2 aǹ)Hx&Us|x:Gumz7/ʅ3@OOB/,u/-o2ȇ=}X{oWƎ{u D ߌ3&ۻ3拻^lvy˟ Bo?wfzq5Zﯯ]9VBҚOOߞ\3HSBKgyYSK(z@PyZzcJeLWRb[fF1+'7ߝԗ^ng_bvY^pE.coM3E(dlSJ58:6pVf9>pt+l5!s./Ezl#BbC 梷v]e '$A>VY3cd{1hZFaunoB!&Fw r= yx,'*)ަ߬&9~s 5n2"`J]iZdAVJ뙣SpE۞:KI%/@p9T!4"Oc(Ѐn$.arS:fl\!.Isgġ(vWIiW݆Y)ĺ.wM/7ʻ秤$So׏C Ƀs$ǝی|"qL+K:G BR!J%Gq)F3muzt^_/wբ(eSVKUc1;^*/ޞr(Xƽ|)(FleQJL͋2"ϯZ*, !^R蒺XڔP]k]l~C:Aa̢K,1'g:3C^z8-;祍%~̉Ƿ9+D$G1f&`1ULib'#p'Ttsy"}t{ D 'MEIڃgKO6l}h+*t|vux!=\ @ʙki5wT hTM7u7u7u7o OਢdT2ЍaTRZDF*KAxEMY-B§>uRL=xJ]q @(FltM֮x&r;ݟrnhZ]$ d5>% UWD.ΔINR)$z%mW:ruVv#m\Zi!ƅ6, 1~[yFYȫwet`=^hWCۻoE}9~c[@e~?AH`G)Tz@8ŪЋ X}e?TGcqYq,3[o60N$ϰ]gvfƖcCCMC p-']a{1;e:Mk )[ 9ee#h݋뻋'o g >ޭ]tnYݤztm}3y2M"n[߈ͭv#OqZxABϣ}/ĽLf_[RuIJ9uUzӒf<*|]זՆOl,'˚wK56;Ywϱ@pLa;f研=IN&^߯_N||0:_q%Rv+zx;3KΙB5^-F!{>)}GiN^ Q u($R1 =wnJJԿ%^AvR@Dv`oy, ^Ja>vr_;T7P?pJ'.i"Ij)M )Qe LRtuЭ2TcmwLzga(/NgTu}vc5`C[q$7K'#UTSIxq3gմ=٬|ڜ>h/k3Al;12z=`/Lg *5"FP3tDΉɞWa!eO߰;Ljk}8&]zBc{=^A 򒉶wΈD\ַ}8;^[0'`m7󳧻nJP:g* #P{k *._$N 8+Ppao_<'xYS'77Q(U1B(Mx&lPq*xA3n3m(tf)g,'H]I!e%i59IbIޏIcGԆFZ'5l+>VluެE˩Ksa6,?}컧I2>|74Fj$shT^+ZwPq&>PQ${t m^R#Iͷ][PJP(Zu.$hS);\)!B?DXfX&QAnPwT$a-9HEJp9ClP!R"Ӳr ϢCb06-X;*к,/T8K3Qk4Rh&3ʫ Qg,tD1T A2ř D$Sk oM2A mH&3K&vLmR_-Iic疝EyKRX,[Ne׼4JKM؇/+fepk|}&l^ t<=,&O77%~'tsAwn,VCsd彁uy#@Z,pP0b.Q?׹RALP2 e+/zBIxut}EMԬdG'M}/ WgsȘV'9DP'@iQRԖ/ ;c+U.f7ח޸)~ñX珟/x+&8ly.Ƴ|w]||~B8A"LZY\a9/I pV/1֕CW1&DUk}7τ0,HjJUY_]@ICFX_`Tt+,y"Td(Rsk4UVȤ i',y4~ Gr 24 XqȵSBmЌEij{HE֖jpڷ?bxWvcѫc_;JwHs&$Z9$RiQk:ƥb68;3Vd891Y b_K T;j k `Bu3wsC }qn!e΂xO2Op:0ŷҴw=IJ=0VV֊N@5B7 #ord&G!O)m>uoYM'|uSpo Nϋ±7f=¡Tc8:v;{}N4hu«8$qEV&M/2nj@yasRll@gVgP.Pw=g8k`5Q'|8SBTau Z!@*J;9Y#ojSۄqf?úR;1El3Sbb֤ i-J\)D\j.s,22~ ׍7lb,lYE8Cl!lԬ@t\kUI*$e\!\fm`5'R9_]F.lf 'Nح)R )$(qcl&dv5"vyAs8^w?|V3.)^Ь7PCQPryscV _p㊌su.ĀbQFө!X@G\|n%rQ1MNS;hr[(Ցfw˘]cy0 ,Tl0}sQ 5` [9ajݺVOT !ykWQZ1MԮbEDeF!+8q̈ଉzmFCуm0j$u#"Hmm n]>*&'lV%+_Iu?>ڮh&`J$׹m2/7-LPO ADLcJA#ə)04b3'тb(KQYΖhaToU]nQ:HJ_:4"6AFPKpz4KEF!Ohԇm4b+n_߿>wM.C?l1m:ڂ}77+7[|aBϹrn_}t6ܢy7D tqP q#(MϥM P󊎳yV78Sϒr u_E{w.G1Eg+*ft.^Vu[}}auuqg0?յwN|+IodEFj8U,*߼&uXz{! HUzJqhowįR+Uadg샱Ϛ)J"881TuQ;Fz8+05'@6ۯrw`7L2Jvd,)E QYIR%SVFkkA3aat/@/k-xnW4F&U# ~zLMiBfU. gLS&2K# ri`y*GB+0Z.(pl},2Aѳe[eA{Tj{To` "n] V{pmtڒ{\"Cx{Z :zaְwE/+Pj(_dVj` -E"P34"w4ʎctB^$e2H.~A`<y{ތRD0 ^q{[P!qNaQv:`.t iu\)мt @YBd/(_X۷ 0t2m. 8AanżNESC>s{,ޅ<|Hj;I.+׏p>+VN#n:q#n%&F'QY"\P|[T9HɐJ.:-xi0>W;땟4-B2钖ɩ"-S2ϵ6ǣs[aY ta<ï[!Ò|BFX͈ Q]RB[I^^!X ¹Cbaet{Ȓ;4 JCV0[sPC\J>0gA` s6DN[}mYQtF:tvXt;ǜ:uy^m bN(C9`|<LmF {)~,`/‚E+{:l e+k%{goӄqŰ(G=g0ßwvq5es6Q+AeCVÑ+w`-cfL_×K#YB/`w_ ax6\N2RH>SMJ"p0,KNwU]]U]7(DtpP };/w_1~+MhLۆ]U]Ir%ȐW+#]W囐Pqi ^OUے.nerΘ18aw,aV(QGV2mDc*XBؓ*AɶDt;"Y) cmAوQp1X&C!p]dqL0a*$vO?ݳX [kTg.ξ(.k:u.Ǥ.|):ipwP'#؅kB^0KzYK-QK n2,mozsv2fפ: M, Ek%d״5j ס91Sʲ=aq8]IepŔA%UkÙ!XSaOM3MоBE'i Ml$Dv_>'/iB !Z֜u&m0KkQiJZwu =ci?5:)(wĊYl*-Tq-ᙓiQ%gTi]KDxT,>wMevqaHR&|8 6d h IDkͦ~ ϝ҈e%ZV`w̺`B3ՅZ0Ӓzu0_2GusLkxU?EDWBv:tT<|2O\kS`<#|m@I*Y:}I/hy,j&Ap9me|",r7|P8d Sq*20 h;yFdk*iDhIyQ5ARd01RDy9z=%"ڵtQ@/GA#Гw4,GAr} ix? 
"۳?G:i=eh4U^.!#T "$5Xe`0pZ2F>!T]>bx}$"z;IHO 0=cÿ̕l+;1TRJxl(9v@g:9lN^Dl{`u, c0BP2$\׻U %WUn=77 P<~7NZ\JtōZ\=1%|4Fzzw ԉ(e=4u:3籱Qk `v&:TX:,9ѪD&J9FMҡwۛd}4&Sfȝv]{I!jfwNb (wL`$6`[tqS{lFK7-usGj"q2߇xo\9ރj,*Pr:z & T{d(Tͳ70(2(93%X=+YPRY$R.{w`F<+Ri$T% l0XPsC.l +Q{PPqq5V֕&*Yu_,iEi]vj`;Ej:CSgq 5wێp}!k#D201$0: $hGmҁl7g/|)=S+زxK[UN{{1[C侟~$gϘ!>96ÝZ j7ly5*3ͬM+`}(L1y(y *hBXo:7^/ܿ͸}u؆,w1dվwsҝ k(^~웛O98[jw@{T3yu9t9/Go&Owo9?;xNQFNmywmQ Nׄ<\WI3>s4{\w4cl4GHtC[zx(z<]Ӭ x'?;O9>.Ej_.N{܇U5hzI03|>N\x ? \~} Xzu WmѝoS$n^PX1GuXSz$fL Å].taW ~as^z n?q0/ADDq}jzJv4x dT b _%G}!B}Q@I'&F36quԞi-H4;k,Q3+',q +&MC oD`qJ ~ #,8By)cd*oz"K 8L ߔW7&'\"z+)2%r;^2eX "&@ l=8 c)N5ng76|u1Z~}u9Xe&H>c4-)XR"fApv u``^`::n[:(v$AR0oGJ0ƶ.6,`}gl)`Rؖqa/*xqb,];0#Q鉤3N1~3$5ҁ{w;NNbfR %fzcap߭@y7%sb߿&yE_'|0۔LDHZ}}L?/wדURG\,GۻO2D !Z|ëb%breכ>גKfd"먀uXji{ iP-"(V 4pdSôUP-*9[mc"TNJHs-kbGLfa4|<57?vc8;^:Bbu_"G+z"3lYlc䤼\.)iI |I%t 5|Kp­kI`:8t Qϣ7:d_?0Nw~rm6$ý#߆ 5}F`5S$!uZs{'i>)zqMӽK蟢j{utF8M)8-Ty>CSH8;@ogpZk,/ 0m/.xfH{I˪h]l[<mq!|R%5Yd3F0dO͙?QbLX Z`CZMK̯.jo,~8T7/_$C6+;5H |d XqU]F^XhBԁ.]]'%/tҰZYZ:n YF UAq9cF%\%4%-%ܭ)0\]ղQNĈkzs''Y,pgLyWFel@ZibAEd\8k+m$9E=h`€5/kBL.)` LMR`%5MwKlGR}b>G>O^jFN_`yI$DOn^\Cp(;1ue>F'x<#0 tӉM_n$/OHʵX}0+fHO8N(i6HI_-nf9!.3V쮠I.wrLH'bڦڞbl}|hAUt埞rcEcEʗS.bJLgbʻyw9ks\9S&oFC\s=I),{FzSq;gIJ \ W`v]k7VAKju,l *e0ҬWw7M>x[e-]H/cGz1 r"G@(RZ,7RřSE3GQmmS8Cq<3OFd}!lPϽOZ1؉R#Ep(&us2 ;j3ϢtY'ģwjH GH5bBHyy'(q2 *4ٞL :4Q86O1"Xd08lυp M8>"M| 8oCLBX,NGEU ER7B V 5,gie7r?R$ nܑPFF1,Zcev' D%tOϫ|Q'vzeh 38,{Onߗ;)*~w'^j.K9rڴaXs=(<.E[uZ40ջeR`,a2UU~"D jGV^Ts0 $F.~m(%^Q|&i``^` ! gt3x3Qz%wTEA&%Pi5:x&wcLPaj߆U;%Io?ФfYCE%))]ڷrkf7 HMX3&XhQ)-.Eѳ͢αLjʽyeSzT{{5cH=sh]=vwHW֣:U)@Wd趷0\~C]a)B]C)7Zv*˟8=tI*;p宴ӈ:PPTJ@SE4XZ*vPBDt8:wu0}l ӄPCV~zsIWtVnf(zE"`?Ver'IdUJk*۹c뙗]ȪzcNY.h3sN^gQLd9] r~LxbH0tFnu-@BL'3MjFy3ܧ$Նq 2'b)XeGa*e~L$Oj}y5$ a$bdʛF\*/잣V}HNъwbvt6F8lA)ᲂX{Sf7[yJ?nJ}]'WБ9m`GCBJLŁw<~d~|2# DcO$*._Ĭ6"xW0:#tSe u(=CF LT;_2^qdh9ĩꎠby*kxmͩ}g̡۠k]ǟsl9!RiZOV{ DŽVC&fL{[;&#ljA^ U4%LdžO۴S C^ $Nv&a!^29[ݴlEڂ}4  R5U>j A=<-!?Z4`es2P0G$A g$܁44AxOvT:Y,ƪ{CX~=8dF|7Ge{Tu'F?^qv}3zJ)wG/ffZӦ(p'GaW$> SuByan1^ZT.R"cc=:T5+Ztfrw zV5Ade(zNmURgE mHڸR XƊL3RP%G+4:Q0+D% TJ̩Q2YdsoC3˶ Z(e-H(%i5Oau}@`)G k2+Wp-| y}7_fqDas,Sw!sEVl?|T L'<{?yUCV2hNqJxY EHa-1cD:Iaaץ 7A.ovtwv,2cp^Q9ǯڦ}bC豗_a!ݷ"WzbR^sל*ir'd^ ο.C ւ2yWu˛NR(Z=xUJcf@%5c%ܬ8`??E+0QXq _wR>[PBYoMI.h^c3]#->W3SA۰Rm?>;~b\ F:6_eL}qFy“7E ۫Fbv*R 2kTUⴺ/"<,  \V_w;4 p"`Bi$xpdV;hcȇw)΂[B2 # t&H7jFx[#`Jyec(BS ,wŨ;)Q <'gIv㉀"sXhreҲo鏟矧ɑ춬,.``GGI,D#lr${]DZΗtw1(=JV$Yunc xtxMPXi/$u+.!ƨkӞ/04O Gnە/2i+m ]&Fr>/.M$,\5Y+rsP`{myE+!7"5% <(u҃$I;4]ۛb.K㶇A'1H{ FJtX_=D7zo*yi\M9XIVIycddA5?{r{ rTh53{̓ۡnz#DB6T= BSU+m$})4FBv6YCN.1h^B`rk{F: _,C2x\`+uc$QRY .C_ ?zbs ɆйI#٨ D,5F+q"M+rTq1[8}JlvYSAmGV?.y=Y1)~Hy_5 (!ɨdRd8βYAp\X} );n@$岝oRIc ^%8yfj&^XIa2Yu쯟ʿBb e r>qaca|"8I?rh>[gF#ПypJa7+|DXz(|TVIP=#xK 2Je\XΙgN oೋa~pi6ofzuw/ 2`Y|/mm|:T^fq}q󃣖fa5%-](DLrW3u!/E~^f1E f۷ؾ,cFYƃ0LVʙIbNX;[`CFsJZQy9N,0- |L J=Y; Ր[:))PM`GJ!U'Qn֋BBЖ OV˳/$B*} hy<1mUI ͐W1ɾ咽a2:.̋2luWfeG`]բpDϗ!K"*Ix+Pe>7CNtq̉nX"ZE#/!.,Ico32+1$IO;C)3'0gl򴉖H5Iy1IRRXUYVlyZK;% PcF]P?g,C\%uiqЖ?TcjywR;mqs#Da6<ν|^ &f_(3h!7}vJ஌9rJ^[5g4޶l@V츾ra"dhA4kOگL}vΊ;a Qq缃i.7t.wuQ!S?nec9Ire#k!ZrK=U=EgHT+.kQK ]FțBF@fq5'V)Ïܟc'+eزnC72s͞2y}CX Pm)FSQ @cYOF QwŲG*(N!aڝKVS߂J}<1wݫc^nm{sn^5G^0^߼͕׮u%w=rhFCnmZAriT=2%j%/16g<.xGIP"*ӈ['8 TQ.پQ{1}=oA%hïpIhDe"CHڂu:9qC.oh1R=mrOG/"`)ĤqAYO(  `& 4I05o5U"rj, 'Y,װq} ~&`Mn ѻ;RnNRDRx2K)hAVtК{1^LŔAݯy=bXmOYhkW/W %22 W$orɛ\AXAPOy*@%L#QϝS-aL`I pDKu_T+XV5'nmΌžM\]SS}"-O'[,v$C_8I1$J2"wh$Y̞×)\+>I>4tTV4躾,K{eK!%эPI;/DpE CD镈BGtzo@TFeI !RFJqVGyrd4X5 DAM7m~A8O=[448Re/%@+dEjϥ ak ^24ڨd CmB~?y3 lPHAkkoI.{e$H`C0l@5E\eSўM^DzgCP 
ŚT]/Kĭ]M݊)'!7mbAG"$6a<fGV)?|CL_(tp>=޽}|ٔVt78ݏ;[uP3WwBO?颶's?>bX}!tv1ϟ]%10[;m7&`,' W0T"#+Olx5TBYN hw ~(=# KZ.1YؚCV.!(>(:Y\"eDiHD# ڙ^A^O @`h6"jB!!j zzoqQʫ"Y1dBq@葒13@׃\RJ{2W4ۺa`2M`,-N8tQ[r̢#!{ QɆT%Y>pnhqs|BfPe#Nu!W?Ϭ|* kMhIU;!FFAkV[KU Fl#azc|> C!IX"! >鄺SG%c 0χO$1JsݥqF i 1   "`SZ,\͘!1d%PU0ìm|1? YZf[Fp)LdjLB-VM ueSAFcȀ6 w|JbP7Իt?jш^eeڷ'%S^>(Dz«V4:: sb% eb0tܶmy^'Zk$:Ymc,~ϸ3 ֏@d-oTCqbz^;=+$-&6jt4[\D[iL"o࿿Ћ7S6whxV^}g2$hTe=JkVƓwc+VWgΑ*4Z2ډ{bjr=NlVYU_)J  }^F\2g2'-sL"rY\yr+.)5ztg 0x8Psto×x8, J~%xNۇQm/g{ŏ:khB`̍MApF{ndR$E1U Lnc}z8Wx7oۋY6¬~Um? ->UV"ŲcGS rR 8+{O XYJg9Dy&@e05F]TF$Wj~,`xc;-.D$f>9%o.72ȸ|}Y9ZKldc;_b2ys'g#sv=x|A"=/^fod;gg'kݠM6wB%5>:aV0gS~ Q±HE8 G> &8 HMl5‘QYrQP4(D-"Vq L\h)-п7OPfB>$^raK4>v55៶]xTŏ%\ I+\"{K=uĒ\[}0$Yw6ө]SRiC:JSu#FPgM3Op6cpuӜK-xJo6=0+fw/ꦋ5աpsLN[(-Xs;C_IJKW " FLjt5*TƷڂžn#'ES`e% \(kY2MI2(SJ^3'K!rz8'e|Փ2*{|_Bu AJ)AT,j2Q巡Ja y!P2J*i+ b7&G_)yӦL~ݽbG}w9eQJm$^]cJ*v2-1{z~FVnl~FVᲙ32(QL&l *f~쎇-e~T` :ywǬ+1IuvY_V@Չw?oФN,[;ʻ8W>Ԙv-`8uǓzZf:r+M#E祾n`t3jg4kd-Q*&N5ZO TNZO|0G| 0-r tdP$gr?}Q1OAz&f,D(t`!,D˅I62"sXJ6E j_&g`mlU Hl<tR1tMG]?b4C5cQ FԹ 25RQ!0$IAA)/(}=vpЈZF-9Cd t ?m37*I)<T1 ce 2i umlnS3I!@ ZT2 *oM1ʘ0{+.ɽTOƳj{b3U/1$c8[6=~a?AtEۙ-η7?yBWy OE!'5'4?QvM`3y)#G6g*m{3陉Rv>I Iȉh-*4اN8[] ʈNwTn;U*3V{e[r"LinՏr# {JR8EsgL\Z|N Se$ഏ:rA\F\Z-wmmh}NH:7+?H3p8 [?>[ PoI+Rk՚4_ $$`Zp1NDIP1fb %_~ ]nV@.p46]&eOofiRc8V%drQd 1f'ݫ<7GWoyt=Jvpf^">o F~/gbNWT4 8V(\FQ9\ {c L6ddnW q:;\X9 ,]LO1{\cLǽq_a fڷeF6d45?lk 3-,^h;RDs;+نeyl [7#fa0y* Jk)p)L;ŀhDܧoO8_8,x׷^e>?J#.TA@?LrIqQ} -xنb!5YTOͯD*c ͓cb.(7OnZ1Xs 6Wo֠c# #ّ#.xP4K^KB6B aɄIb#?Qqڝu99 3@kvu"xXkVѾ<]Q% -,(^M~m>a&i70ۘpKv_0Ei;p躬] ~v_JC8^+C Es&'Q(Dzχhms#v>fRD!{`G.`3dIQ 80C*A臱  quNod|H+gF@p8Ό"ҖHa '(e~ñQ=](/ T޹xkXӱ$i: ƫyx+5Z-vxÚZpP/@k&5Q=?xϔb.da"?PN2JӁ<; 8&?uҹ ?O c^͚" ﭹ1jvOxhЎR2SީtڟVi.:d4_DI8@#ίDK1ř@w`5!T`@ѱ-MXi*X j:ub-l]UU} 8&*)g!"aF`s B!buiE</l:cTܭd^"@:wL.3B Q[m̟]ъ]uŧrJᆤS)DKʜ6:XR8M`F-ۆfQ%꺹ǘlLhLdeLj5O$Kaˮ՚{RhdmF!Wrd҂ouu_-˖|S,2\vfQRMA=<2L'YmAeP H`{˳f$`|Yvp<}6m,aLi_\\?~\^}mGg> Buy ;`JW:3+Bx2Bxq)y'dx"o܌ZS-Քp۬f(D'5<< 5(zڍU(} 5)Zn%Ɋ S&B*[kt@ ǷI}<9[_@ nlm?UG| boTso$bu_{;C?1*y@uN[FF!kĎCR6g<.XPb I>;.8[Ц2VrYBT;}`r``Nj?7HpΎ.$v~AR*`['xnd5ϣ2k>0pgEא&[ -mWlwE!b-;D(<8oRFv%)!?6]X!B;1h!%Pq(t;᱐+'2֟=ג%g|MKB%ۀ.BL%mtEZvO۠I}BPc+~Iy ՐXhЬMa)4F9měۇ-P6( [*!k܅2MD|˰5i["qRķ = ֤H~Hlq7H$$O9le_6e.O1fe"qRķ C(!7"Vo !yÒp儲JѦA(䜐V@*c&}PCB/c! L%YAWy!6y[}Ե{k42\l  )["kٮa: Ha J[y| gݿɶ$ǐW>`UN.2txPmxWOi:@mz씒rnͲP ݅LUQBqƫ6h2dkؠ.[ j=ȡ13R:ŠjL'D Uۊ} 5''V4C|QTyD~{7K;=KRyq\7(,B7dX'Vom{o ~VHŽ/iaxTg3QUPCgk; Azd8a>NtOVE> KΜ+[2 B1R!PCU(VNE,FR„ܕfj,i9Yi.]yc}L8> xñTE畁X'4Yn3Y߉0ϩ<(ݡ:a_Cdx AƆn8mU! @]{U &h_}Gsj; UQॏ2C(˗¨ǵvE?6ᅝi:ی׻aR5匤ےԦO wqP?>3gfꏿdׯ_͛?{2|uI2{mÏ;?ͯ޼o/oTv>SN2Q}2Y:I&J׿{ߧ/ }˯Yn$KKfNSu3;:u!i}?K2۬2;毑e"M9kkmX.4ʫŨ ݰ'B{5Ul{ ܆٥CKfMgmQR\ۮ)3zB`nrnߜ:Zۍ.IOZw[c?G:v?=J3/sio_b!Uz:t(sBҜ}2NH=\O4a? ??*U&ƪdFi߭|?򃤟LCW'0f$d0ȼ/On:x?^,#L> сauh]x6\>dÒc*q FO3F6x[4at]uRSP!_;؇u^֫o55J*>ѝ?Ͱr):eWX}zh{AIМ]yz@ewL*yn+)s{F)= ]<',[S|[,{ eos+Y+'mG *Eؑ782yǀIJ^îsI]rp0Hm6' \yZiUi`F8 Xkp6f#B?zHLҞ.IZ聂Fѣas`Nlyv|VA<&G˷//uun^҉5“  ${~'B? sOxS&O~Wf8NQ,u\NFsb%Wе*iS0;;c: t ][6Q Т*@&?u~/j~1]*eB+( =Ť҅/9IqQ} fOS0! C&$H$(4d> (!DaDDu~3ɗ[7qʊgꋺS7i[T7\QdV[1ɫ*4[J/?|QWt+ W(VGa @'#0xqP,0B a(~Ta#Tf`$%c&5.U_T|T}MUh,Q_{&(g{&(W J.(K> P͡1|#3]m6+Sen*س}[W~ʕ3H\P%"AQRx8dF `DJ/Q Vh,j%m?&&R pǻ0g|2 C@Wf JnB>AS 6W\y7?4y!ʁeA(ZBHWH),T tJ8IQ(ԟy&1Yw(!X.*oQ/9# W^ D (# U8J_߮-p;*郊>B[a`$pԢk``6@i&89- p$Gu݆jPr`$ ȗ!ZO~\TLL 7AR}f<7[ Vr):Qax*񵞁 M,2 R0c2'mU*%Jd^!t?z-L* ~'ʙ,*? 
!=%-޾y5dT@'wF ?yt !2#-_uh&Bqɺߑ7A(:P巯_R!np@®F;6'bSFP%^_=,Vj}fPq=wx~l9/EfMHݡ=:n3 g,DY5٪m6[Uc8cn lFuڡY7{wZZ2y+.Hie3pMZ(^-fD n-=z4ʨp:wPJ0svSNX+a3BV' ,lYZzg+ZUYM6!R!ZVOЈ:2ڰ&VhR] j5 ֽ4L%GKD[X.Nk"SOYHRI6|x7 C xUfUf_*?$ŷ,D狏dk1o]< ]sh'r2g߷XF0F/lQDZ4bA՜ !G)a.DN=A_]x/)J4H# XAu^hKp@%jV1I5LNQȒ_G&yH ir$"Xu+bLTy+M D( &(d6(Z_auIJTz9޻ʷXʰpޫ )$e>4>r74ğl!NmwbB9fnDcXP B%o@ֽ"*Jn iP:iUtNRs[*t;JF[~!?jw&Bq)0 9R4wkO[q%*tN7 ]):${#Ԧ@E& ړ<7+KWk#D[zyC\(ƁIu%ɍefNT8p,Zr%Z7 $!|IYC->hQձZ ߈ᬱ,{)5*x7^4bԱ +Ro3`@#^2rc(Ss3C%ji"Z.z)#TQc#xH+3J r"Tx)Ҕ$4F{E@qwwk>O^?׌VVHZnX$f*qq'b$Xb SY*M`ӼRFrV%uZ70'R*8ݖ5CM^FpJVuT'pudrm#A7ӈL9h{(a8/;Ǽ%&(6ȝeVf{EG7 -.h8tV\X! "/A'bXx/ G8N2f1 `+8q]I(=1 Ԡe#>!w9Cbߋ;A l ,K'C{җa:&V5?Z-N‰믅3EӓҢIñ-dt«G!jhaHtB  U{[{`֚6#La=_:,Lv }X7M rVXׯjIn߮ӨX>~, s1 "OOVn7l]bxQh>ܻn(1~vRG!XGV<p4{|t7<ם/b-iYvS{6oGU +/L-zHS͢E''"4w)S"(i{Yzz_tO5I ߊT;N,sKQdU2Ӥ-ɍʶ|9- J:8SAɓa@xjچ43" cš]?Xm.ɲQs֌SfMFXӸ9lO6ݜ.3_G~<{1cƬᴙ&Ruy. \ꬴ(Z^uJo#p9PеDu 9߿Лw|C|巡S$tZ4͞}\!ӕ\ᎄLa[ +E[ܚt:7J+|kS>Is4c X(WtP.x, Ê"<OQȁt󊧚E 1f~f)coUlm S0{ 8L4 mNODa+=1DYBuм(!bwX!jǏ!N[1dMgmR5mM(t>;7IyAP-n$ń֘ǒt^}?7N ;P I❫^oq$G-bbcr)QA/uFwk6kE+m4VX_-i R#{+h쉭fCGfBqQaqXOFǝRs6=w`?1O%3 ڥ8Uw>U(]^mq]GM}Rh{Hx@tõ8JI8FCd(0eeR,too_/n<}>5W{o32q~<:(gok"RB-a9XsQrX+c\_T 1 > |dt^p5_e,2"8LnVӃրv^Z[Z}l<9q(@sWRق奕細Vkz )\ +Ei\'֫k8ؐ6  ;azUJs ъLln8c)c`9V`K˲8#l&֫k8`zP]ǝ(+ވj`%+.D6!:Vd8u34?n߅L} 2߾AjW`h_cpgE3$xXrJd2xk`Jn%S kk۰ʉalwɘڒ6|v|mX.x5yalRaWֶ^6?9^t|tbiPv[Č^[=jA&s/Hފh642IR؊6:٨% [ЊJ/DS4ZR1@E Qh:S_>hP5!5CU;(oAf" P[1K]uwْd[ͦ2ĸ"hCJKd'R\qݝ;D}i`T2=!"&ӉoE'$V{oA~X V3@fJiqўdy8Kw[.[1_T"o Czu_ݗE^P!mFgWfFRY<϶|IT=sJEmRjnFf?cq{)(fԽ5tͭB9eܩ23/2ke^KB䎩>[O|5 Z' YN"/Tn2dk6ʸԽm dq~M$6'ޣi~{=K?: ߺc|K߱\K$>&ȴtٻ6$W~]mbo`qveA=EINv~5()Rlrfq ErYOuuuUSAmHVkGX@S,gUdeCVD4rFFԘذwv^QQ_8gt!E 2Upl!_ rUhNR¡TZ(Y[=Z&jxgڮEރ %~B3k8}0)Sw8qEdY="bZjNi}CH%klլIF5!-|F e^ASލ*lTl!2hCl0FAJ ҹ?D]> B[.*ͅ9Pp0C:SApى&r@KҖ];Z+! 
t0qvd[@ |pr7PFθ6@P!zy;w8ŝo=kw~vLRj^Dfb!tZ@DXcyW]F- +QJ!RDi*F=)"$)%@p2he "sƤ>O#c8+:hz$JW*jW_R"F>)Y޷*wy3QnqShpERN&{"1g&q-ST: ؈kІKB0‰#' Q)bI%'];T Dg8{rFpHy 7M ]C:Ctp #i~rEmoC# zgpMo C{thпTb &YPrzC&* !عl*E,9m9sv ZcN1Ti`T80aT9%q ޕ\Iyg38]u°.xH^A>:鿅 JO )=!2'FQ>1I"D'eStŨD%E|kBAB %cTϼj|[чkԩ4/LvGertnȆQ 1G7g2hj*K<#<9NZ8 Y.:2G9f%xI:D#Q+&2x*R$ P5h gJv0F5'|eQhVCkZH.B ArdvaڨRzw)+WNx GAגք6'6~Y 3Yf(dQɢj&s$Xװԑ@Ծo\œZE4Y|#^$ 4%{|:먈%3\hʕ@89ߑZ"RBcoEѕsȄ&&8Q M'P(Ru\Rx5KUHޣ EcJMNOA pN*r\ե%[ZZ .~3|7?4^~wY7g?[o"dooc8A5npיŁPdgnMj\mD:r{c ]=?kB4 J~Jg2kO\E&d$ CǤ1);~LʎjǏЇDhqJƳ I0347ơL ]D!mnXszaF$!zA@Iiט5U8_.};u&+}lZoJn ќ*& 7a8cE (%|+- {M8 F*,PSXPLqjr\y׀3Av1 k JHowG,<^h3TQ= ̴TePٳ}&  vZ< ?b/Yl;}._Y([Ey(oq[9 X0pƥ,F<8 BG-eY 3byA<:ea$TK H=UP{53ֹZY5;W۽ֆ Z |d:EĶ?ШMLїKzP^1EYO%[ tRo~zp]-?^E `dr=oތ~VioF?~ξgT=\2p5gSID疧`lhK)'uQZʵιG)<:sOnذάE輳b&۩#V=Ge\D$H VPR`HpLYՄ(霗V:* T-Z\k9)R^\.BNP뢌 9t$eTj%_8Jv6gtv+GZ*JZ*T9 .ӳgG׸1!>T/ul4 Rx7'{s++-6?1ZVUQZ%UQZUVnMի 6c=*ܴ֝A=֠вYq{ap|+EKQϡ$Nhn!0sbꨀ,9ʷB[ u#RE}zI Z0LUSpZމ\vIKIz8iۚ&qft7u,'Ā3zʕ7){ʈFh *I~S㚇R0O[r7z6Mץ'w3?>Q8z|d6˿awߎqmAZ _= ?D% &i<Ouvs}8̺Xzd}et6׿AwFڎU1c}A]@u@ZQO7;2x〇xQ[~p'2zٗ1[yq[eAkmVŘ"b<%`q g}3<$ ?ERܭt_Ud]&;Ht9pp5*(2)1q>H X6}{n鮀j?~uT2t#SpQ6vg{׀9DT+Xl落O.;eeM qac/┮&Ǵ N'\ /4]kkN8!HwclٱM)A8&qOU_ZM^X' ;j,J5Qŧ9*&l H9ql!C ЃSN&(6)Ʃ#jHrYZ.k^ek\kYoHHFG~6)JJ,%1:ARaWx1" N4!H4Iԧ푂"`hv=%;< h#fOyw@PX RafH`!Z)yrDI- \g(*-Gu^Ӈ!RҠjl :琳d^KHTS= GPT,Bvl8-`}~D:1X>A| Os,))ڢd%k7J[?y#ձ.2]džAٶs("= qT-MtEGt,! uGDk?m%^JR\L"ʾ,$7-Y~:߽Ž/k~a?ړjVRԻ>'p?l'K:~nK0k.~ח=wh!ւx/+}TгP}/wg~6kqgV  R}|?vk;S گdG:9TV}Wd L03$vtTQf'zkoLX]mCwntYy?&F8L~[زs`'FRifzىC*r2^j7v3>YD+?^Z8uV!iR?}K98N_CSYz/jqˎԍ9v__d#7b"z X/͠ m'Q[ 5JWccW XZݾX;`>T_~=DOk,[-zo¸?R2V+D KJ@P,>  Mp1)'%mXyfV5+Z-Nf ryX 僅1,"uLBȸ`7%Q)Qk J NZ%V 9 a`X@=%*:e (Tvwb?Ji3(&M[4f5f z; Sa%+p;ղ5 /I;g Vd:}'bm_AoӠEtd;&ɢd2$xAIrH*?kMްɱ9D{fH[xӊKU+$WonlZ$a{±aPR<$5`iZ/Y!!dF ]}>6D4v-.mA3lT9P',BC(D}o6tX#. =15mاxY}VK#H"ֻZ`#wݢlyU  f kb\4(|ɀzkG=0[v8R6RU1j XA)wnZOK$۽ؼů=yt_nȢs4 خq{P`Q9!6(<ꜵjzPB:Eu2$I o9_Ocpc FYOd}dW_-dؾ&,1m^?VfZF~w~|X1Fٶbs1/\M\=mWAb C,~ E)DU XJhr t/b`=Ja#[x."J Dr^Ù!q|B=HQӗB+G'NwXSؐMւw@pV[$Y\5;){QɪQʞMbe{0>Y'QXӤK)qykxև:auw, .:QC)F1M .J%٭E²l'ep#kWljLˡ_׊С&PX\nB{r@~htZ)H VkiR$k)K:4Wu>RnӠjp=`zs5IYMQ!ntԚ֣J6FEO86 JR慭QnyEo({^z+P()'|:#}f+]ޢZv_VA]*2m=܍툸e8j'\ႷCm=וQGL0Z^zPN^M+ eK9@Kxdul^j-flkP,TٌMgPwq\l `~xoWC+,U߯7}짫o׫?|8[Oa/׿ʞavF+=8h#)Sp-Odb*rn5:EiX[¢ܢRնHj" ^(݃뛴&o#}{mا nŤrq`R^l7 OM$MiĢ1ϯn`z dka5˘ g2Bc:e(A"O*k09Y%GUMRxILY2eN9ۼ_meZ.q"BZ 5| %mmPw $CPiE|?S%<xV"(0>5{ww!p79"y\: ;WR.'4 (::o|" /2W5t2: 蒯M~L֋?n] |KlscPo 3WfI' 5\~oZϝC' s(9o+#h(Α|iƧ/98$\;=FO%MR)#d-۷?WAjM-PIIAc=*@!j[>Nl٨J]翆>'/Z$¬bglH4॑n۲ֆn2sA-El)z!$ָ2Ք`QxnlPstxn i7f\-Tﻫk7%on?[\·uqϸݷo.<\G¡V6)Ft>>bwҀDq^ TUӯJV6 * X`ڐOyҺs o|@^R K.قo ^PXEb.Y ]FX gv8{?ibU68cdT=jP:X&tnu¸ec|.ؐ/j㼝]ysYʃQl9yA笒q<ˇNw*Y1VVIw+3zA~^}i*M\ԛ%Ox,tlį'Ι#s+]kH\=Bz0+0 Wآ [ [ 9<>dZQ8w7%vW;ߝ90SN +iu3>_+-a*Np->O0qC_U .O޵6r#ۿ"}aƓ`q7Le$NJ$s[lV˒,/`gV]RXqA}S"EVm2I NK'/F-11[f:"WMeҜ.CܚOsVѓu~qc#n~X#bel8卻sgyǏMށT!i窀E,MuPQ ~łN<'}J҅@$Qtc]54 ՠj*$)D%uڡ *vrq$Hхr*Ja=9%Yǃf߾+Inw2&):k-ꤺTPԓ?MT`OFKw/eP6he=4Q^zCU:+RѬ42Nr:ݜХcTcO)CIc䖣b{p?~{Age?v"т ]{|s/b{'w7,$_ OWd)I!ERxÒ>dHJᕶۓyC{a0  ETos뙛RM>Yr^& y&dSݻvR11wTn%p]n5z"[M4ɦ4}s&B^nDMu:ťx[r2slVqM}#^nt֡wK tRQŻuG?ּ[zHVqM)wKpZI81k aNyRWiKR$rjTQsi^v^d|#? 
}܍+_+rD qՈQF1QG%v * W=_eR{РE'u~en:s}8h/à 2 ]NnQ0aeprOmΜ QNe<>s ~П=3})2xV/:,ݟ\0]>6{I$G+b)RNIݘYdDl7QkT,-vBZmD3y8(>St:\$e 'ր0KL,眱8j iy3Icw\SZ+w.FXH$ƫLe眑>Wn0^iΤB2 3̳Ոv;]Eh;pgi5<;1JeÐ$'~Vx$ S ꢇS7C`$:4;fwX"K= k=2Ȍp}(c3T֊qD-*lrO0M;keBm'*'N?.< X#)%Xι0\{f@(C ƐE GH9Uu/X Jh&r"ъ2BJ\Be8q@jK0JzP)ʬ@F8fS͌9q >{eNy懅fY[1!G ,ўS+](a@S(s&XZ"-q*SZZʐ1`&$d fiZ#( 46TB ^`Вhɓbکױɯ-8n{0D7kcZ(oF=^\֊~O>?wr@Oxb^;#4g qeNu|tc+>G;Q89Cy=Y!&4}Sx{BT5]2NV|آezQ2m(zOD<-)N.ԲϩX3LZZ%T^~yZjVѐDIC'[5IUyS!-ֱ٪Ob'Tj('+ݧqDPzA%~pbm*3D&Gnn(5pt:8n4{Uu\٤iZG5WbRCnvCE$ 9_e'pLu>~iR!n0&=߸nbtBrf<tЧ,<=I(jkߝ?]$*tb ˾ zJa%}@iGUmh )? m2K(l\qFIRBZ,2YhT {-Hb*UC}PtI8 _tik =K \SE*/=qkV%Sb V<}2(1u:FXpr 3 ,$BJS]N6yZҖiUQVK2TĺͿ\@:xz`{%;t *w8''^C^)Ft׼G?\8x 0DFevڿ" }Mq|x(7}'Rp_1ADI1t%,8 f ֞8QB &-0 ^MnI-Zwэ>»Q%WlWE:AXR&˗=МSF6(n"W1hS돇1 _?Lxw>C._Hqo%Ғ(7ݧi81s~Bq<گș0?_=L#"kwi@w~f3^qzA!%kV2:ڔ]hAE*RB8]+$MB9۠Z^;t1*|^7)]ax 1 K,~ a<IOAp>|,"y,,FKQRd7(˺L5*_&pw!A#FWW tIsV2-T3,c>RQFXKtu +It'!e* fG*dcVŜu,Ŵ #bJ OG H+fVD}-z5@] zs;-8(h/I6(4Ƽj5vA)+q{BOݘSsrG:bFQs7$C3ؑcc%J%퍣,oyʣچ9&ynfǣFÖw X7_[ o`Y 6vaN|r Hꛦ7մytP3ڏݯuW#@u.sބ ka)rOǴpyj'۬QnT3mR5נQ)|ܾ؂"^({piN ~\X0c5C|\P͡%ퟏ{3~1P.H z8F jtxwIEK8x() e?hԨFېս:FHjd{-c$ A79w/M`*D8ε[~q 05SRp D\ݞ˺J"*uk7Y׌ /`.9˻hp/)z%2ia'fq,e#K6 .vثj|Qhf):?_t|w_HG3M+ߖ?uْE#%з Y-# m=CZQ.}3xy"`H[` uֻk4ST`Iv9yv(/W}I]l2A\ЍrlW^*qE_nM֘2vJ^մ%T9?g5UXJp4q\"/keU $BsW,ՖN)l>cg*9ÿ_ *!m×{O{pqj/B0cFSk!zׂhD9f5K"-sBdJf3J|bG vT.i0UG|*nY/婤C$ccy%S@DFf'OQƨg)6+[4^x'_5rJJj*K)1WNDṶyH(!soINxx$L<]Hizݕܱv.QkjxA,ODM@;xMxp*Wfr:N> =]{y1 6^L%-Q'E'au}يXWLa8w[`lL=7қX\aX)3r˄ΌtNZnIkZk^;Ze/Y BR4rs/@Nzi9psaxf)IJ:/"T A}3bO_^Obl9ʼv47 jf\A:\ʨb!w hv={wg9J@4V ( ]1c2I֣8$efQtX?{Wq0/H1=fp5/ryhRwSk;x> 'ofw6Hn(,M3gJ}~x cD*J5W+!aZ)&!'/$/(ԁ6y[{gYj?ls7I|B}d<0z5;Or8z(:=,@ݍlMÎ!8p&D& ڇFEf7zDuԑ e! ihV𞻆(JpQRMA$)UEm r(tQVFw3. 0d?HNt}4N lH#0=K"r/AEpT>" q\kHK@ًi A\'oo'Ƈf/ی1;gq8C_ߍǻ4x{g {̷_qwb gt>N9H g\s4 ڼP;B[= iVzdՆE+h{3-s4Nf`m Uwwdc%9JGQH&r띳lF\] 86n=ܣkSWu-)0S]n+zkPQhw1+ـ t"*&yjQ$jcru1:'U*+şO*}Yef=Yvt No6dpn̿șvxtͣr/1z79uߑQt&~~^lޟi ӭ45e.Nl&o`s3_Lc]c}yPo3D):Mw~cevV! ELΠTk7+sFV2SU?4׀mV.ݚ/\DȔmjs^o`&HŅT*j^mB3\G\a¦8EOfls g1gA07[Û;F_^Lrq&'k_Gf6{?`=bDz3ęQ{<73s'dÒt\Hp֡T"żm0}tl뢪(U@"6Ԓ2А%a38K(! n@?'z)Kuh\E[ @Uǘ?ΆL6쩸%rOew}Vn*owӕKqg1Sѥ8P癡[-9} '֚-}[kj3Q/o@ {ѷϴJBZ3;`+_RaĔu _ U pWǗ@#;T3אnd[,m/ҘJ0=[瓁R __)Mm"qu%mJmOSd 2torzӗyBAo 璳\>g ƹ>+Ҧb|sH;b'H5EX_߬OQ)eEJ~{ |D>Wy5nD6ND[>\ɐcZL"B jxP >'\ɝ ֚*a1F\c>!{u_t=Xf"=j||#sA:J-OIU2Tњ,+KGHJ7LL5ts}>^akt_̧0 *΄:dc[4& jˀ]2 "၆@ {GVOϟWgyvo1L|񭟃]^yш`9B#ԵZ܃y`rzn cI`p23EǜsoA"9FbmJ&z)D'NZE"! 
Mp+9S1IJP!=nEiZ 2p5WLRR_!x TRr5(),bDDT]OLYP!TA!RkHӆ8+Og{ӿܹ3]!<MY) *0A$01Jj&s޷MP70L&M- @^&Av5'|͇" ^R6**gO4ʚ^Ȓx5uu{O-S1M5A3XVi~~I9O[ I᧓MVTi!8(~N{^Zj;l[؍tUI"UޮJ,ΒR7lw Ix?kՍG\*)U:$j$!{{x|MZ 3&PsfAy | 9/DzrFX _ec{톢:H3uHyi0L>`' C9l=uH Jv&p8\$Q!)/?xd,WC|n+ڮsٓ}C[!%%9pI XZPܑPCIvQ3%&J$ U qȽ{ts `!MƤ HBp#pMdR,p:rP t!c]e {2_ZyvV$NTNz6>}WW' {( D:#ɐ` 1C#9ZE8<5b3 s!'cD∉]D x.v.HbQ0B)')Z`+ daJJA{x7rvfTt ʪ#`*Pk4z̑Z(-ًVG $CHOE&FZe)lT \#ڲs ܒRR.rбD$6᐀ q f FDS{ű̡a*|Mh oGmfn`P{tu'4npV$ NIm2P% +Zx&ko.Z-1cD*J5W+pbZ)&!HюK1 =],IEkdlWi|J0+ƀ.ÌC"W:>Ô^ {5Ƃ97o'Q[C4=1l]iۈWura\V~lY+kZݥ[sNh(݂#ĄOSpY`޾s\9*etǧ&Uep{vÅ#ڻ lۣkJFj<ӸF5pt GmbBi`XU40(%崨ձ^{]Ň@d Xj46ND#qG;xr-312{u.'5$O:V0:TA!Wh{l^ڐP"Q:P& +)g^`!є 'IT1(RY5 Px3f:7-_S3]Мoɐ&8R K'ZN0c&rQpRi'Q#Uvƀ!^{zN5^Mֹ,)V:a %~ۡvݠ$_bٕ$L'OeAg]O7Wg+?'oR,|ţ*`V&Km b" ߽@/cS:>M, ټ?S|7º$|?os~_YXHu#;?ϱ2+}א|"ZK]nXXQŠ}GvF_x*igڭ~SR5!!_,S,v5 7O,wrX2O(hXiT*˭|ũڃׄ9fY#%'#´*<܎/s6nϵ%!.t!rJO^ D''/}D#YZ.xG!'=ALܬDkLy{zgY=ŒOYaZSzqT2EL[.X"{<{0hjBh*W Gb*Z띳lF2+pHr =s*ιwŭRt];)X)xʍvTLi7Zhu[[zc^/mu +aBmn \\SwmH_QXx?\)۹'_6ו~/3W86CFwhtRo;RgK8ލ 772pٷEA̓1-w*;5n1:#N*9z._{ovZОB8~䷙\ߚ- 8N8Fx$P9fq-"8hQB9&[n( qay.3Bc/J׊MUOle٩$l&C7#1Gel`MiijhZs [CݣgL͉ˆj74ts%APwiheIE)nJ"pCOټQW)ڠl1 Ǖȃs"Ȕ2&\, 杗Ȓӽ6-7mhEIDVvݲڐ.Y2U)RKenN`Ƌⅾ{PݼNЦ'ͭS]9- +HǿӟO' "K͠9nnm)!Ԍxavl.F^o ;"hLYa1Ok,MĞ>N@0AvT@5AFHr1H9Ncjl_\@SHڐ.Y2uvC;v Etrݎhg)m]kcڭ y"\dkGV'%b%[ ?/ NHGۇD&m c.8Xi*`N~V1{F > Kٮg!e8{T.t+";>o3M1'H"<"2`K2 S2ZqbxKmrPI8Њ 8hE=Y PE~S)m:?ԛf;L_`>YHWII1g,L9I|ؙ~$З@$R"{ h&˓TwBl%>/$dmU⡝}{?+mM?!&]j/Rtx^Ks$ܞLo1l&-İpS [XITL!ƋC2KCR /8 j;I|@q<J_o/ūeZҦ܎^_,/ڸRu,/? #yce .z đW$NK;21->XU5@=}r2QL0-02ϡԣ<`-p:"EQLoQ`X'j",Iv/`L9J.5yVya1sVq=NbDpKԅ2 =IhՠMXmy߷ (=vH:`)>,6Ԟ0ar3$@`YCİ1y8IjaD#dL8>ADPL{@1kP=BBEbkX9VXiEYGf{O*43w/p+.0 Oud;Isv$t&4pXF֕_.RZ[%=/걇x2#Op_5,[y#(ɏpY /ǭ;/FC\@Iˈdaƭ]o~ғ|tKduG%gCI tӁk>vu,"p/j{TB(eqً8uՓ %rX|}Zn}`!EHu~?[pZJ2uVg{j[T %#SL o=O1#Of6!J4_!ccP0V3c.8aT"•@peP@%"m*Y!9 UN/!"^:DgOC[$%T ZBJa[@k-%0P&gkf(J1|~SvDs d V`Q'rrʅXTr8H3F:ZćCJ_vNAJ0L0cAjFQ&ov4#a&) 3 ~Ӡ~Г ܔ|{Gכ4 c񣞹e}a^NB1 _}{eYě G.u{}b@E_@L`o.3͗Gw/÷(\SX/Lpw-?=#3-VKKƅUˏn^al!~H MڵM&v It2ݎ[@[ E4Gi W(+*H`tP;grQy- ;GHQ1!չ5]S-x%hm~:OA;er,>o#JmUgW Q2r[7OnUYܥm}R֋]M.>NU¡Ѯݾ*2 xQlA!)b8\ _a3Ҕqwc |mҔk2-fDXx&#F{`·A씄6/uA̒y0ۃ*CT!IФIC.Մ$!!qk035|btSV򅜐S8#Sn>+kq0H mPAH $ci`ΉP9Eb5  H-YD IT餖s+P^; %uȸIeZCXiϘC@ϔCUKX65@@(9yca^a 5Gs됥vJ>x߯q4Zy(=`On0/QNG.{+lD+["iVƢ;W]ux<r柮/>x]_E&)pjw!xn)->UKV,SIdWL V1]=C- @ycA$ðf(ÒR :=u@!8XT脠ʞ9Pq j`@f:rb{ɬk9<'.,a]'IBcJ<ךxaC4MQ:ki3#\$R,^AM(^ƌ=pɩה&9LԚlN$.aHIAzj酩ͧl+\(bDFy C\rhtv^OT6?#서l.ˁUGktX9\y|/TS,"ՌIogܯ]0]/  -ǰ2f5 ACaA"`ޑ+r :vfdI2vWgQZ⫺vY@XJ-͏x@Bc ZsO7օޅX n"FX=pUsGYK )^BqxhQpXz5I"EA 1bBeY'Og.-͸#}cT LQ:jɴsq\L?Pv-> -5!4 ~bSM!u!?K-ANF$" ~䐐W.I2%#n%hT bD'u!mwੜNin9$䕋hLxoaWKnĈNCیɴ[z{v!!\DdGve[*1:6cyd-uv!!\DG˔ ;ͤ@HU+z&RN{F֟M?lČxc}U\bIP:Ŭ 4~Ag0g__ݺIQbՔ|N;…_|rKÍ'f'ؕé'7FʻE S1:r)Fr$/ $_\='B½TBiM8n/r~nqOC1MؿJW_u2'|LtRS()}E%DR +"4?'dK\_Wt+rߺ v 2VqA3ϵa[ws:{CyCFr?A=E] dS HIP\aBq_" J/ v7# /EZ޾Δ'1m&w;yeOОJM/]BWj15-7@k^u!x_=Tn4 (ɤۏ\,'L:1ꝿS 6יQ=htAclLi'3ȯ؀B.ޙF[JOAb dA):?'UPIbO7i+Y%[i`#GWjnxsNsRF+WLn(Ɠ 2Yhg@#[qb U?ˋu볮U2聊5č´VT5f56!eT68(%d ֓nU^zNA1I++f7Ar#̀1ٶB#[ʕBqϺhqց57%T;(*rGCK)ITD "wW6d c6J4y+iʭq)%ӄbj45Ճ:KK޾J5C4tEzmA|uvv!-Uar/Zvcf ̗š_>|Phj]WT-o4TW{xbːbmyMD\>֟x#e ֘96"ɭm.mBuzazp"灻?%ӻBr_w`ע@ý'ݝMSIDiO^+p>%G) qC%8w[Ri2{;c?2[5j?+?{8f&1B`q"Hĝ-(||ċvdB|pFM=ڥ|<8QeEWn?2vA.(xa.,dm HY|絇by6a s' |Tf"bGHi!P95(('3fgL:3ȉ8'73#cXkjI%V55\Pllb10;ed45f,5OqL boGLfWyI֑+¥v7ځxE+V|xM"9`vx3A^ t?1O+:a7A1 /gRԚ3Wy_1=n>?"Lw6>5gӝJLdsӮ EütwsKla73C+E) jJ)l1w 5Sf̢[ys"Z୩#8UUԋLzcYgtH":J)fCJ~lB+(;bdg,'/1<J,K<z8e$[Nġ]I;ki޺ VYR'uEBI !M$99,SY+\'"'( m8C̰ՆBy'WBnc#>HP@ug&P*Q)&mc5W}M,5N( ?6%2GNxB%|p/-PO})UѦXTMAlދ?yKm]8Uo|{ p<ܵ9wm]]7t]zx{ZJ4VFS hBUU{1H#=kϻ5oɿޞps 
r[cԁޞ[=Dv&PKn=g.y-l-͊=eax"snRs&R?6Z-H?<@p}Nϻ']&Ҳmklںr)^y4wWDhkU $U u~H(I?>^>{nC<|!u vuE2Tץn}ZLjGj G(Jn9S θ$AP hH(I>R8ᡪ"rgA.H|L_$0HfSCn˂vK4ތ%qQ;\ƒc lM9 [3 z,"PQXRQB X~gυ' Cs2IC9v\yc r+qf/"A-K|z ]%⧴Z9#O;-|'!Es 7fE Dtpy)5Ýdp{蔪+)K1c@$Q\$I-Z4 cJ)PIzD@!l Rd?q + 1 @U]AvqmPK7Y#"Q SMY'-=#Y^THKfYz-C}E0a" ϭ!^@M9N}@!\a BCQ脋'!~PA(G@7**OW e\Ç!C䳵["Дd{:_q%WJj5l'\$j:=CK>@)_l'6_ Д.IΞ__|U5޼! =>yaqLwݠWgyuMB/?|7~yџ/_d y'18 G, ܳ0ES*c(s0 YY(A,acӓ~2^#*9qtӓ/~2f"'x<'7{ɘJ  ǵ3ha2xNL蚍Fa܍{B%8C /7g]_]]08zaA"(њ($,YEs_QH./G)/OBe3࠾|jOyzr{}B-:Ow rX|}@ִ ٍs@|Pt#(w]\/)3v+K3yv 2TV"k8NFI !I]Zr QXߓd#-08]VƓg496W!boGug:Aٽ=yv)StuCpWCc Qj܆P߷߁6m50\ڨEXM#2ir^7΋I7r_-7g8Aßeb{)~߾"#n˪~?l-{sQ]׷̥9ѥKۃlfDVl7=ƽࢽRZ5#֒4=S(A㛳߾ [Nu o߼r<0z4p7~ofe;T+@% [Ӊ.(9Xb\cD"b%5`zaj 5`%)u+\`z]v}Rh;++x5}/5Gm])etE@vOOJ 4h>Lr-x"Z\׭MTn MmRHeJRqoO_%C >q5/˜ Y:+{xeH>{yx_ib3^4ؠ*[W Z^> Ce9y2H[ D.M<{u뚧5FIf@P'@0vj3 <=z'#W jZzk/JNƲDrPb8[yrz050T ߮=Nf!B'9T3&˺?Eg*{~٤QȎM1Z4&̕Cxp{! { Rvқ>N.T{%9cT.?{ȍd/ !h;Ac'ۍta:0XE[YI0_JJRbuc˥w.<$쌢0iؠ8ԕJSB)Č$ᘣDsA$Z&TaQ Td#U:;;G|;jDAἮaESM18?_jˡun2\\Xz:Y Cx,X$Oi,nY|"C&2[`-0j =x7KW7wjn*2GcUuJC8%]:K 0IxBP֛":6 u5gs'jU_(u^;R\!:_)F5JcK8q0{u+/"Grg38agyP߿?1֘ED4֗5W6E 95>rjc&=FTOm¾4C/K 8oSP"qK'$y_[)-{@m?!S/Ҕv&ไν-ZTi,\ ŌtbHC>>0ܐyFe GИө&Xbُ8Yy&mTzbOY\$-OUAÁ/0|k.;[U;fߓj"c&kK0_ovWw}:Ʀ|r=*>2xn/MwI*sFn?QY[wy{x\zrQr3nXIB^UX[(>:F"O֬[xq]OVp )IRɺBfV- ukH M[JȞ֭ y"$S~?ox!:KQɺuxFBYs_Vp=]8W?U-̃)*i9Q ^ITC} 1ՠ!iD%t5 3v+-n/!.ՅcYOf&+% [IWlbA@6 aPX7 B9hc 1T9R.nQB AM I( H8ւBQ2 p1V) f1EV@YHҾWU8ONt|O";\ܩ䫝fI0v3NuSUŽJEqv ;؂m5wѳ>tVrYZu>՛ 7Kqը;Vc5,yes«)Q<^ h޼ćlz7e_\(S=ŚME#$Q`YQT ]]5b5Y436X >B @j*9}SQb&+\8ZwDHŞ!B)5x ` SD})O TR*˅h=K~3$e U?b^ *)GHJH ntyƂ3ZܤN]R'J6σ;Gc?oͥkVXW8.ʮWЛE`nvb Zfטb5z9q|-^Oۻ;֎qbbXԂ<7NWo8]) փE+SALAL23m7" I5pn>2|z3vZYVA@o\FbL7Z~5yZc<|ەZşXI\3R}b9Yc@%RЊxJ% X@YHMM UTQ}$aeI-bHXd<%XŌr9&* i0H%$qYPm } CM.aHr;<+î2XCĉĐ.g` s$J"$ I+58 -ӓwIv '+[;v3Ì"phw,}EϓOC6K B` Ŧ0l}9 pIr:&妕eZ.sE Uz+C uj㼫]O(;X߸i5.* uAǮv@f%! u(~5zfdE<brb@Q߁ovw4_,U&eqR`)|{pww`"<&ja</>> 0E1@.h}r$(qfkL?S.7IxSwGG;/][R ] ЮBR=8BہDol$mp1:0j>~peEdP=^,V28zM/h]~k7})$'gU'3 dۉ&&8d?,hSwr?޿ D',vnd42:R)! wL#+cO[{2=5HS$0fv&FdFY9H(VH-1G1 ٚUBؙsD&hN`1tX 1 JnpDY ƨc`k_GTT*,r$8k9"\[ c;]yK=ʅ ??BV@_TwD8pCDUlB]ycasB~Ơmdz[H~6j~5}Z_p+c~7IΘ znr\hn:8haOx Ïc!܂Pqтֹ n AF=u6= !1lCul=XNS|D^2opZ4RzO%`]le5 _lUfM_xBS$Ըp¢8F$Y]yq\b!@ApbRJy*H$TF)d)4&5`"Xt%1gS[ZCYDJԺ|%5T@% [/KD%űR„5T>ke#z?M^:}0+}P__BLF`ZU6 $"N[t*8G6:{gas(oܿQ9u<ںYBC~'~pS06t-s bDŭfcFbb]0i1LxӔ,h1e,@XClB!"DaśT"'4݇T"67GP͋b!rY]Q@ƑC-=B혞DB)L\Y(Y?)@BQB>+?!ՕZG`;UkljIK#%2j`V D(eabY޲Y 0^.p 9jNىT&P- ԉBDJjGMzOYɬѩH^dhKr)# O?KOn+6br͝trUϟu`.+c\ h=`RINg &T®BP%1tꤗv1:TAl` 6E>am+Z))Ɔلb-f&u+ QĬҎI\T h'q84ѥhYF$aJ˻^VPymT6X>o8)5l@b8B'jJ!:ЌeUWD,ZzTho''%9E[ugegA}CxS/Kr#{ 8#- >! 1o~Wf#/ޮXt1 ⍈CWϾZ ^4}N{L `oMb҂- vNvT_W9ǽvN:)Xc j#p.m!&;oEݛ<@, P fhftſNƎ1h%iXOMҙmvݶ##Q&{^jJ۫X 6Ouq' ]SJ܂AELfUK_vuk CCiV<#vEQ-{OK}7Xw~OI,̽w^(3dEF>JI /@3)E~8bBo?}\ 3٩SE٨]PGѦs UզA[7 5۫oOC,_C?~б]Av.>Oq8CI@[Or2jj]PPIlb-UdcdCUdb]pw+{ξ= f~I%ԸzKW'J7U;Lj)Ʊm(HBMA m[AՓFgA,Z=ck~,oT&sZ\;_[2qK"$ ʨ 4JJ}u Uw8 O(h~1`+rvDY&֧ Y饁$ix` G)Q+Tq8N8O Δ؋ĀVu58@-jK4i)O(O `$~MSÌ&N7Hm'Ssc3(d c --q]LP4gXb̓h;JjDߔ\r MItZBycMwɯtr^L` 4P cfu YD9H,*$&L`tRLn ]Kg6smD^ (m`x3X8A9 j{ԗ0#ẅڡYQhUC;J!2өVupLsfB]f2QVlmUF|8j%//):_F;;ߒk$ J/fsg[ӹzR莫>Xq UjnkmFEЗ`Vv, xE&Hf'6@^m'NA%%پ֓W t˾bHUN&35L}x'1m8FMb_?L}L7f?qr.`aTwojO kx7Χyn ~"&Q?oMBk v`h#qRŒ n)kc(%y)6fmgK.*i>Rڜ#։5+5sǬ|獶,m ('}5%VYRwwҰ{z_isĊ`y{:<5RVYzcW k]9_m]9ZG "t{&Q ɆFIuUыmڊ{NcTӍ&{ vS p# Pfy3Zx7 <~"aHg8Gk&HJjx}~h*H_ڹh&AfWSdrY8zi} 0U8uYKq sq"q8xx>>.K ȯ4ƛMq_Rra*PLI 0ו{WGG'U$~`0jgDC S44Nи /B@5:n0o76"5s%^$Y@ƹAǘҥCi:K n]mǒ+.! 
,9ӽG{-;gX=ȹȻ|usL%>dh]lgǟ'*" 3䒐>"e>B#d:g=D:=W=?Bz4#%|K !Lyꌳ41b;LZͺ&JA7A ؝-#NM^TP[V yyWwb4ūPAJd2e2t^ "u|b#i-ދ *Q>/;;]}+_!vLgVisQqs츻|ՓVG/x;=7mtWUvSR,ELO+ͥkdzD3EkE%e2^D{Bu6 UA["i[ C|+*&a" IrSHk Ai(pgQ4Ny%ҕĺ*h)ahMr2xjfsՁ`ǜK]Ҋ-]T)&a0Į {%nډu.7M_IJ9$Ms,rJװCD*=(LI 2(AB#ר8=205ЕָGC H gWwޤɤ['tywk/LCϢ \{Xc &oVRI:xca?_%T'TQ")^GVbߨMr9ttЁ Lt[T(Dsd]ѢJ̔q:`yw MUU5rI4ݵvMR1U#"!pcFGA V+AnMފ`)4E9 \8oJGMŎI }$Ǥztn+!Qi@T@}tJયT@YscWøET@˙j7.?.K9qO3 }$ ŹuBTh}N-M#ud#بJY,Kp q{tZiˎf΁hֈΧա}Y\6T\OP2BT?'?RBӸWj8,iEpszpd"$*hwe{C+&!D/qٯ<|S ΊD#w0tQ )+A FCQ R4L(V* ۈZ=2i9<.rw~tTrNBYJyቋuL `ІR97İB_E}}L;*Tئ".B"RQQ(O+_-zawWf}%TkHߡ5zG4p;vudd佌OqՏ_n"{?f&iL5;R劄'fwŌq./kl jT4gTl-gQJ~4Ȗu0 ,e5ULź6PX)"-0QMљHr/&%=@@-Z& ѹ@ ($=ؚkCp&a#h-q&Q\b0- h3-+ (Wva"g_݀|*xL|pq8_^xS7v>-JW` kzT3Na@?C1S28h*FHW(!*ݎ^G/ _ c84.yCFs3R^P/7 wf2zSyqOR .?O M%WJy<,0>>+ l€ε\y^HJjFp1& x=|2?DHл#YbJJ[%;.fW{Z֎{RS4bC]zp\P 9JGi=qKDT>Fܔ&XWZz Rq%)/)m*'8nvk1b(ō1NrcU$hZIŸF mŰ6R㵑ƶ`տB@kAP6dR}O*A(;X *UBSa~%f3@`zG߮tڔ*cMOi>7\UJJYE[ىL5-QٝqvU6HO˧v*e且цw3<֐w*_%ȉ65'f\zneG>zl7l~Fw3p~zońK5{ŞT>b+ZvlL7giT~3{.~;[ͥn$FybAw@Eɺx-۫r~h5|(mڒ3~*(%;55\EC@w3%x~u \/FS,4Ы9 8/MLc>klc671u 0JϏ[DDpbmro5_[T:k>ɭ,tqQ YG `ǓR/u\T@Y"uR0͘,"h'@ji &TNWVGr'o4ISO%N_j-6HP);2[2}Xnv]m |)EBQu(*HPXZG/P/ eUIe(iMt-MIr`Muf 2KےE+ϰvfXv5`R^ J_<ŭ'hy|/u[P[AR 26ZF+ͣڧUK$'0VPLS3rrj63EA*I]ì05z>=o_'O1p~Rc x8ZnSZ%f?i@W!N#dڛSSIw@yl_P!׀UKޏ鏗Yˡ̏N뗥έ'b* 7Z6YOW"nu11wjuJKdzm{ޭ 9qeS߹zrM[] rL;xƣNK#n?dz&,MME^nB[] rL;xC(-nS2[r&M L"F6|N&po<粹Jh^HOQQDɎqt:V+眇%i=";NDz,U-|'oQU. \͝ E\,iꝏt~P{EhF;\F,[P7wv)ށٳt2lnO%fO7bR-zaDwwI|,ŝU{"=Gy[PWuҽZ֚8ofPrM>|`Q=D *FPȔ^W1C\@4Rs(?^SE5hd~=NAp%Xnl\bxk[2L  sݔx;x-j丬\aBG'-QO;y٠ÚG0tpњ=Y#PV6󦣳ىH9,) Z F!do  j&"ؖf>m*NHЂ˰״$ W5orWeD!)0.]fʉ1! Hxe"mmmmؔ w㉚gܪ@4@0 8{:Mc-2^[ 7 US.3kPgIvg3= Wggэ|8YX};EVd4}PqOnZ*8>]Tl.6O Nك{bʥ2)Yv8 7pf_k]o {FHԛ ]WɈpb_58Oeܕ(+bx>kzG\fzvSbĒ綾}EV+^G:S5x|E=dpz$7zL ڀ,ͼPT?iUbMc#&H#*Tz_9xTAtO W%8Ar9kB,=7V+1Z<qQMOGxfU3hD OhaH+g bd j$,'er1%ɟ=~\nk<y/dm8 L_ȳ{rgm9aQLjvݨ8ی;%upSg9B^(0̲Z8,eNEQVs}PD/'ajH*1PJ_-Ǣ(%Ȓ{=[p"$zHxg}:`^(Pа[ĖҶC!ZȚgW'EY>o =??mJX<ƴTnAuv{4:Ur:nh4q"ժ̎6o!m FV *\T"zx]+*4!8x5xSrռL Wٛ^EEݪ)xrAi1:|1DuV1@^F.m/=RcU?&/w K1kU‰u`S^c?b* T#/}K$z,jo0: v@1RTgၔLv./e"NmV z됥= r7VJN'qEX FAc0 ͕LgHFxbEu ُu@k͘vepiX ;l'^\MWat&$5d*+#-B=نe*YleB"JkԹ0VZKq(0pExm_.`\/YSYj0n(t\$ynGζmۂ\xk (xra'JKDB|$B텮}sҕ1N׎Uw(r*D*BR-e Uu*Ynuj!i2{>l[4VXQU<]4NEcމL2.Y9P<uj..H,hپvӾ|亿Ъ>oYUU3NOa\i%o5薺ˎ'od/:k q_xƫ/j {A'cB}iCzٚAQ 箻| `Wfj^ 4T|֫Yt"Q/_U&/Ŀz 9$_W1O?x5V GT^'A7оZRyk-ܝy%apj*Rk|[Y=|9X@m%*sf "{O%vl /!FU [_FjwQ!*a$wQ\zƑɦqܕh+^b x鞃z=dgN} $6+L6Z!"Sh`-TZT r=@ap6)s;߮!b_Ƥi+*vx4Y="-NjbCk@W£,WC'WlݼͶγ>ږ8ۘ!)c|zNcvv~k5Lb:S}k: 8$qnkP&UG*CR6 (^#.~+pD;!Ճp3K (^]8Ev' &co x^TV@ ~ :ד-'>ݜ#Ig'y&AU ?x4R[~L"aLCwg?tN1TKޡ8(=IV(; eN;Xa0z$0'9 8Ia3 (с(ǤRo> +>uk$- )_O-o. 4Z7k葯y,+fpoEHO迿?+_Bu*H= #E;0`X'2 :Hc!0:`4EVQBxw+DSRxemנLapSV,P"kӇB4'4WØ6&cT1Py4iZw-X_(CVדA j tbi? 
[%SGC{A&| h;X=c8Y61n6w la߆mq/[`Pk\26+m%IC"5;񞈔KSQ%$?r5\߯ *NTE׃eﱞ7\ϲ!HP@}nEEslQ܎B :@3:c;CmI'i럕K)~(w'n)$zx'UYy*rU/UpUIRa묏ǻhӝ+sM>VѨ`uh.n{*5OvLEbjSBrEHj&AG^Nj`FC (hNUQt,q#jBؾBE{߃Q^v ID*e&Rw:W89w0=(}_ ҇g\#)Z-m$wl4nl7e+i`b:^}:'p Ign&`NǑ-+[TTh@EOՑS5ճBZJmތ'YTWe\w\5D)ĝJÌ:b~apBZgR>uԐhJqDI<Wiݴ–θC,M"aVİ&#~sn#^q`N1Ъǝ%6M 8;wH+ 8lk+Wٝ>.&Ҋ&[4yU={<z#7 QvH'yhJtPnT!Vtc2 DE[TJ 7A-/[3߇;=eS |e{|^ةlr- }1CKTà_ð+BX~ K0cLX^E9o@LhIdTw|/^Hħr+%FR 0Q)O9NK01Ka X#9 & [*945:$d?KԼb0uD2.r'$H) ɵhepTKdIFC6Z*e>nz;!aOrT-ʏ/ g-d #<~5ɥ!R7Wϟބ}ȳG "Iё s5lh|ow%uKLq<烀d #.R9%UQyF{-mFs$к 7C3~JU 8 RYи-rvql7c k!hڡR\X=I5lOGI.tBoMcO򣦍K1]z+ݮ%~xBkN_-1ƺpxQcԺ /SQ~,YѰpc|B0kIp %o7K`F| / gw#V&΃wO 6m0ۢ/[Iw0͹[Z NxJEr If1 + П9t:ɉ5>A]½ـN$m0~?K탠8%hHfަOfybJ8)q-2͐̔4MgsiRj*Rqd&XZJBeA 6Rf#& DÒ,KuR"2!SRa&90cT8BS FR2 :H{'uPmkYnBIm=+V jNIQR^(ŽgYƱ {c5( &YҌ! J(d2y;qʆL3HZ)3Ljg-oE3I}{5ȿ LWЛ*yW98AX%(aLKBb%Xl8,-KdEB"©̂|jbJ~q8j, v԰QvV:# 5q+7YyXI%'87X$ Mf8;%WR kJ cN f S1&CAl 'Q2zk0KHGn?{WƑB_> <l{//0}]JTx8q߷z"G왞b'"sT/ER6eo!TtjFA{r ־٩d?Nɇ)$ BHm-`ss0GͽL{ڸjC6VHt;FATw#W ڃ w k1ZZ:1YyRuql:5bI* Ã?ż-3O#f2y33oyi96j}I#LO*5֬TC8N6)hޓnPWpTVp{wձo׼?0xl:D 5)xE%BDvJםf2`jݸfZ1:1ɄXZ.c(,QH2(!4e0ÍUd4 H>0hOvTcb Ɣ8;AEM`Clh2=H#%aoH'PJ16 2J¾}w?Lݡy7MoA(2%p·nM1vh&b f\!mXyifE-rBK@aL "4k#aPG\1sU)f)JXk|!~$y  Wy;J)3C $x #PNw3eD'2Nk0fp֏Tʀ"=Ϧ#pm*ZfL\2{:)-L.@n3T3`Ae2-NkfyC; ]Iǵ=ZKLfDI5 EF\)22U c;NEj8T[{<Պ+gRBS<ȹa\ P/VZ9(E+wJ.0{nvX8X4VbkA3S f ,C TsQj͹3t ^[2iSJPM0\*u3Ƃ ˧ﲩ4'I%>.V#jО;C!EkgHzʸv3C"&rB9Q&Ewr!S/L ) ջH;)0Sf* ?Bd~ 5sjV $qd((cF$VX2Xu&EƯE6@&l083OAA.`P) 0OIF΀i A68#heh$"c h !Ga4rKE%ÄGkuY&uz泐=ͅwh꘾x![ݘWb{uj`ch tXjFAtr6O @*;(&ot$-TܱT7GD& S"\{ kZI -:XA#3<)DDb{;nR+<^+Jf4aKۖ5"!7F)҃ _((k1^d F Wo".MEű1% 7(FZ''e]:8#K$e!qpP30mE1emOPzDΣ{[z"ݪ=' `|"1Ӛw>gب5B s+/>VA9;3NUaU|al ër%.n^KUﯾ:V@U'(*v4 r S28ϼ8r0;yK~:V{z/{t>7!p#Er-{$z'PKi=ӺQ1-N[Ə7/3Um5:w)ANfLO%_LJ p̳@gdjc%~UYi T(, dy%5cOE+|y|=_:Q!rЉ(OϏM_O(mUgة~&ȉi"WHUV])Ū;Ǒ(vyOV=R@_DYTh bO ,8UmeOkhɦĢ5zί~?EcZcOjDeӡ+XWT?ֈS:St u=.lg؏_ 91^'G y?B#4<#Xu<@(ϩvXy}>Ւrm8D~K7EᖝJ7c!Cۤ'p+K=)pKq|:>kXnu1'Dqvn@-y 8gQԄņ@ao8_PP1.](sm+ Dt>QϨY۟ c*GiTq$Fs&}|K=n) r-_oubc\x?_Bޯ>N"TG!Ͱ'q6 89GJl-,c32QLJ9G|q(5Cl]qn67c: ٧4(Gqn8c@#P`11ش@Ʌ5XFHL"P,Bᓛp,wey8sRG08"][ ѧ՛-Uű%ß6rD]#qg?/EԆueA #^pFB(9LIg "C(i3YQ$'m$P͑b#loB$=vt)\o}JlحѰ;"J`H4@Z5hl0WyHd3ÑȭFZA qLQ$ +pqo8؜8')s@Y 'Lbxd9YیP-=ikn]J5d5u0hHK r 2OD%UphT &t3-, QXR@Z;HrM"?A9&@,O )$zV42Оam0Ha4e6!JU#^?LhS)ⱹzJou=Q~zrA _ oDi _Q X~=ܽ rMzYWg-W_oּ{ޮ@1ڀj ;}WC/G`gc.&e+wяܢx+N$R(c]{<t> ]1,ĪOL2}9}Dܠ=z7Z.iߢk,$F;na=e( )F &ѳCsjH-Vffު8gKN;,eHhe5F P5Eֈyc/G-`+E1[Zdk$]n/3Q;J4hM(cWoAuc |s@?҃;snE5n\2@W"s,ZEpW.a><^wJD&-4a;?ޯo}$Ǹz+ʏ'3+\&b> \Ux_.o݂sq03:*đJ Q; >l.HRoI<Κx kV`x{UvZ2#:°YvU(,)aHujFcf02Uh[=-dKQ'#ErvG;cߠkɃ:*PRuK Yq0N5*GjBI=:J+֣?=4dvR 2Y MArINM aZ.me)O^M1_}]$R;𷝯g̯zĄ1veU)AދR߱ ~Y|YFsrx*"4v Z/BcC]L+#J)E)L`X,ů E4Ik7[*1&ӊӷvKhvBB>sM)*zvKn؞jF3o7Oy:H-aܯC myJ]q_꼹 aӝe| &#iQT='>$k@ fJ+~)RBC՞n|Ԧ|`}7`ٻm%WTzdshҸɩIyIJE@V'M IP(AE)B l~hэhBdV[qͦSBk,VP=X%s7&IU U5Y{_sb,y]~NiS'LE*cKThO +F야 FTmWDׯ/ncj0jnsa\uXQݚ?g5s-Ë_њ%\& J.ƬM:NQmh=WP{Łaev:3S@/:DZj{[7=X9N:FM0>Aayf@js+8:le4]n0, l\2H/BwM]5N@p%%M@qeJ56wfccm>7}X0TFP(afQhsG~ݐxB7(~3 M55:ƕ߹WNP^1NP}BcKfTjѓ?gH0F9k!]Ԡzv^ [{Wa>q ¼& `M mR?TϱWD0gߣhN:k);N9ͅ|ٰª}|od -30P{0爂E?iaёޒtӞ @݁釔Bӆ8~sE c>;4Lv4(^h LGm< ڽ:AIɘ6y.)9@ϥnR0q7K9 eh #=oG"=)W/^>{dD.UXOT9[y?7daO^nmzWoĭ,MjɳV=n˨ɥ$M2!1kjҌ~˸hZ;!gVR*|J2$]4k<܏fW˟C_[8 2ݧtv~=׏>醨#ϮPYqzjyy|ΗhJ+R N|[Zudz}bTBy#B)! bJJu؏0\0> [c 4)dIHOB}bx4Z3gxKXѵF sqa=Dxr\Y:&maZ+%_8Z·s.Fk3 *[3=-svi^6ZCYq?Sx 3: VaCp;NE/i9^ ]-%JkZB3ƈ.p8#g +2$ '3Bvh%VMӞV5_ 7Sp '3r9r"C 2ݡ嘺-[:#8tiBጟieP6D=̈́2R h)3*iY*z[n$HwКz3P( Y$wO UH8CNX\׈U/^_o?<~AE} 7̗MC¥7L!L&%ؾ"oӏ8kKÎ1^kDtfz;_n xpl{?=*$/@*#s Nf uM?Qb6(fx Hy4q`yqK+C`6(. 
2O!K1)I`S4lR8lp֨Tq FE:#L-[ ()TӮSqvhlWE˫Pj49I3ri#U !.PAr 6p^R p&-=Ma^J'bMk0ZPf<3O3&;t$jbGXKY^9fSc\βа 4tqJRIq^W+.l~ξFdE2|x`p*Ia*อQE]/o4чk% =@Ni-ŏ酸@r]h;H0IL)&C#Ew*4׎쳐W.^\}:V7X7֔Y&!ցya#e 1EIMmuyu8\yQDn[a$1+p)p~iz#Y[m9DA(򐊑6Vq z (Z'~~_ǬE 劰&XD |.( 5Z(~[?\/V:31w2Z,/)9cT3.R (Gƍܟ/]O1= WtAun6mC @qcTꘄS~Bjc@8k©TjWVo5^yp;*YM$&qkA'lE^2E8% o hb䅔T#ȫYrVk0i5TGDg"Ky"Dҝ^V8֍Su6^bH3I̗3EM:jWm|b拻L~Zԅ[اpw4/PWG FhچxG9Q7@XCPC#Ħa<} M[ g)Ju (aXa̬f0XPY3`VP-0Hn\[*xXX RBӧ 1]G0 G%hU"s|G5N'AIQE--d0ùvEr>䙥y۪Jt[Uo> S͌iΕycp{b|L̞|' ̔)dNk̶>D,,-bp`޸SoBj喵%6Kc}V*L2AQJ)yFOճf5njhsK>֬e}^c,47hJѭAbd_o|0 ͺWf7Z* u Ϸo\>*4ԉC:bgIC!EĞO4Me $ڞ7BXӶ!/%!18wy n+US!}FT6쾙LV@_ΰ.A3$bnzOE׼Tn Кjc1!M!y wL!SqN@lg@zۜ5mh+@;B4Su f_%a oMEcח"/ XbtMjC^DXֿ<5d}֜@WT%@ 蛪Tper;u ) %*C;I8˲L;'3L\?O t\Qz bc$K9za یzsڥI 8@f jU0 s_eတ}'>St~Vmu~fU ld1C Z 6!Gt0-N÷?wɂ,D+׏Dž&R8usK̿_7wc|=,I AF\,No}#3wk[d +ӂ˯kH&qdKэ>ḥYJ) z0L S?Wb3g- $ni AĵP#cx!=[5bYd -ώ$c9μ J'>LT@FGh 3vl  Fᾯ4*PDv<)J{p]UQ-woF(mܟ57XZ,Bu"ڛo*Ў2rFmbgfeoZK g%U'WAyhWBw1z^1Yص(ic hWU4FAзIAJ.;BvO2cqjQ$MMǦ:h[+C@v9?>iyZN;#5;*کUo ~vI%)|qcIW(N{՝ifhW(N{})eM̥P`|:_MoPƽ"Yu1w.GsӇw[_{heThT^ԽNPˡ/VDi^ÄCrnRT3N{0)wUz0+d\i) / ?#PɂIoJtz2M֊} S< ͯ=dnp-aQ~%$8Ipn&En^w81Ȉr*H4cSD]Ys#7+ xcH!qC0LLa/(\յ$նg&:b,Jr;%TH|HdyPRDmC 14x] ㅃi>?pg,KX@g#lpWÍÏB9?1GsԖOz1<[r5.5Auɇ`^F&4\;-"z("J ƥ-)+ ,y3X{_K A'mD紉$TYKc ZZܣ\,ǖ \ Z-%@, ύ*8 '$UE (f.7A&RmT$5h 3WtWs}n Ì$Bq'R, FN;pNiك ,I\CP TNƤ&$;u`HN^J(ɸw't7Tܯ9Y+3ōOtkB;>HVIOߏbig,~ϻPsҙvGd;yz?aNAwSt&nw_}) 5RghOϡFW/aX@F/ Tnc)dN$ ޵AbugӘ!wUkgwh[8;lWo%~j4zC( :KO;7^ofjn% t1܏-SZ%xf 4f2+ޏ:}yzі)i^wd(B@u͞Fz+Ve+?ǼGZgٕub1ֽaQ.7܋G{\=? Ε]/f~u[ $Z^\n' |{yzwIIijʲOd-NM}I0WR[  IQ)+-į y"H޿ Խ?vkA4v;=gX咲{pڭr_ !zKdYy|߅K@d N<2$w'>ɽx;^x{c˻Kjͥ ѥކ#T ?G3kjk]"wEMxn>V9m7<O,%6@^tlDǫp$P~?گ.>]_!ʹB1C}Xf44@Wy'sd4 f2M&b~Jl<حA* xܽUȤemwpq[o_`cynl~?QwrZjCJUX~ `soKl cL\*\ټjB +Tdֻ[B]/8yo@L|אַ7@  rѢppЛ}[fS۪uimFtQ[<#aGо5jJ-T9|/\I`*{h CTIHU@~PNpoVp9p/ք`i~/Js-^e gCib-bMa, (ބ襩UbPڷPӀS7nyzh3%GqlHC)6PP̻suQ3V3!yNN {z?i'ʢ=#L&!{6o0/JeфQ -VwZOu-sUsTlpQiHmPYm@zAp\3a/hj܀+\hQg-kɎudKNf*Qit yPL 8ZB4~4&S>yC2%do gZo8X ɜ1t>XEBKgA+ i ZMXOWh Rm5!G[)(ǯ!0ow&J.Ac"`2om[DKs^[ 5@{(fCxm@ |g)lw2k (>z-5F Zիu*8Td_*QSjv"i=J/{rY9ܧ!?.G&?.J[M,FpqsqɊ[8s5cm+ I9+e'iwbL* 8Wd6B="nK1 X8<1X0^1֎1΍c|l9SXÒNi'do)Co!k*rr= |ƘAvj5͇1G>{͜ʅh+}"aHmB! =,* \Q^p=pY"0ψ{hpJΝtto+XLĄP!A#fTS!E=GoRH(5T.;ô4P6 @@-2q!R ߧ(E$gR.X=pg!2 fJ7*ɯVZ^'OKZ'Wȭ'_Ё֓_/ґ"0Z]ENPRԨ'^jJדW"@um]Iƹ2lr]:0Hi2\ ,?Gk*Xef+Z >)}U \5I1J)ϬJiÐRaq6aJ1юŽ?L,.e蔸hb`Feb .45J"N[V8QV)"I $OMJ4OGptb'+]zjf,~J?\rt⠵uWea)fco8&q9HLkgem(VLsY=2CtFq6Qy]|Jz}.6c1c_0"ZfMjDVϛ>gޔW}n0F:ƛVm_O!<:H7 T `E;xۡkpTCn$WNVOhRI5G֫)I8 QipU%JIKO!z`*օ@ԟ4c~Yet,m&*µS"9h:qP]xp\ -WgAZCCƨ5U&j1c2p.-pi!O14¡$($ERQŨRR4@Q+-X6lM֠T3MH !v׵I ,FknLCF qѸ3 V՘܌{IaQ923 dh#;L aڈ_CQ\1Kk>Ꮨ_ZxUjIxOnP8G <EU H/ķEп?挌J))=]^ ߎi/&LL11c/pӋPx)҂ڋ btʐXE%9*PD~N<;8#_FTx@g*zI*p_Yy9VC[hn\W/8\%v%-CCe 6dg 5DҦKz~6LDIܴ"EYkjYDB6F69 J (PDOOY`,sRT Rrٻ߶$ݥ_cs! ws &/Y|4]l'd_5)ԃVSMR H`nVjU !-y!+M$MbHMq#lr"R#Dex= B $EMk p~K0Hoͽfᨗڵ\S׼;.̬ՒK׊rAy )RȍPhs*ChM] P|V C?G Az ~Pi~J[0qWtV j+m5J Jcj,˸'ܱTB=IA SI[! 
jQF`PU$`sLs6*ƉΈy+tN& P0Ui>]Zۨ*W7cmT:MUh4 *FvܕEmm8@(yq:.rMѠW¸h]FAn뻨g'shhHhuWh).*=ub-T{LzEN'ӞgJ sZEdžx4ko7\nb޾CqeKӯQ++/>س- sa56b7JK3*Et"[j?D.#`8쎾YoMx?=[~q =^Bkh6?Ԭ .)"*}Bx~~̵[ s6^Dp.Cjv^aeiIBq )I6ۧ71w4n݂/`j$Bj oj7] Pb#:hiMnmH7.k2y}!D=ʔ!mh뗵\I!@NMٴQ|3X|ר,=%萸=zֆC6!VPD9- ;~\qo)Qr(vk:՚ 'p&ӧ[nx!!߸#M&+9nN;h]SU׵v /ڐo\DdsvsAb#:hE /ڐo\D)AQA˹~gOPk:$ !@"&P0n TmLzwUi$(Yn>o?s}rzZ\1Gխ}A.5V6x;Tl6!Ma%%Wz^Ib '`J$Z`^uVf> bJ^Ly ,Qf_yu K*=nJQ^ NQ^m"ALQLis F=?~r•G>RI ͡HKk%7ɮMŮjLLt}?eSOE񮬧Xj:}~WK wJ,_ߕI7z${AB!riT7ϳ賁ۼɉ}*`ΦSt<\)^mi~8MC _ɦBܗoؽ3`Hl|ҾF]:[ϻ.R 툋]M?[[7gSϟ tG\8uHKj jٚ;PAm-:iM$,!@l# lHw*ԐDw% *"J#KL(N!r^ՔIm+*@ 3BI4\tJ$יig֋7@ 8SD _'7ǫJzY2%yEobà'ϊ{Qnřw6Nx^ O۵jWF Mg-v$㌻TYDۗVO-ކT&tv|!qDGܓt> 'I\<,I9TT< 模͗߿f_=|Qy`V ѭŷ3Ɉ?l`~KownEeߙ,0~<hC* K틿TlLs[Dmxqˠ@q(W$1:?XWʸrVƕ2VF5AdqLgi=*8h֎3T'Z.ʹ;i=Ade{FOaf(iԸ|g6?E(A•{gZlW0]Ws9K+ޮzrުuə6Zt!s3Drvʔڜyk<]苂х|U>iFBǼ%]{ZӅ9CoBCLs\f(FPMA"g ׅ Z wu. !0*fNƍf&,K3%H~5рvTjGluߏo68j (G(q!,fwPjޙ8DŃi] u2qe2ru^= 1ZCr-3p I|5C+7U|М.=#'"2{fk`BDBWI]KM*l lB kAcZW.<΃ϙxw 䠦k3k(+zTt򽝌)Q6$JRbZwzk?8@y Z?nbSo"xv!-rKL ]ZNfF3WZ03 G":%>ZU%]Yo$7+TޗX` 3C`nat$]_T*A]RV*3# u7㟬9Y}ڻ+30aTA4w~96p=1zl  a]| blye9kŴv׼)PF CQG#l@` Lwl}oc 0P.7| I/hYQp ңjTF jyѱOXt"X ]lzE*)_Y$ qNSb,;|uhPM1RTOz2(:;"sǚa?D6H%Ġ"0\%qFM>͖266d:Ҕb^ EI(EQU*O5wŝ?I:Y}Y,%Kav[2T AG#[huI>ZTkG=Mђ7тڢ1!&'=;S' "#'T vZbI)R+ ;C(H8䖗-y$8 Z߆6g5;9 c'Sr?k`qAAB cŲҕC$+-+Ƶ +eXe3%D%`u7͙+2)ZXѼ;X'>@;g%mPr%L{gK#xVڰʥ(<,a73]O}Y1[N:L!^[.fOƇ?;_C@֝8Ӥeՙ* UX0,aN/6'6&و4@5@g5q/)7;i tYW=jk6wRkTG<[Ԧd[yB-UZ-8!(xW'DV1Py_={&h]VfYEzYhh[;HZ.h]E)<%)Lr崐M#/q&~n^k.1El!o̶I/hh¼#LP%FS6rڪęs Ԝ/)7vC6UB \ wIkH{]]R@T$YB)*\JUhVB*_I%p/l/ _|8_`t@Xy1}|/"o4+o~I Y@xy'x#zq"\,;>DWl/ 2;o83v .d z>XP'x5 EcpZh;dM*qu~z?FKڽ~o H6r\[!Dt}3Nӽz~nT6.aAá$Np7o Q7gr4HӭGxp2z&ZC T-G{yu[g%DJ9rF-)UV:ޅ78NjJ]Mf%nVm?x.+XCkJssi QdvVSpÍ]0vrYP*Boh9 Os\`n D5{Å,}q񍗀TþՈ+lc,.^{6~6TҎehYU8.D%81+iJc$7g.YRS%U{ntFX"D PZ^DŽ "%VԨ)B3vIEBKkSdBNU6HFԆDTk,q]ɐJͥFRHH齨*b%^)aP#EE a댦bRb?yjjқ~S@j>8GSB"y&V6"1c:ʸ%TS?"`ΰh*|xzG[[:Yi:4[ONi-SW9*<[U&Pobrqw3?C=&;1 -".I/~_W7v2;]IDy:[<O<-+V\qυ w絅 T3ESoٵZGa봹=}OQB^:`Kx:%.| CG@>{P-ivb(x=}|̄svA! 
6δ%S=>=,FGTRL˩ͨ6݃rh7{7J)~ii@aWEѱ5Du)T|Z~7vx.#(EUFݠjwC &s :"hbcY+l4mʹT<ƳdЭ2)D<?tcn!:ST4"w,}s[}*\Jd^/AeKڈ@EL%58m &fХwe2WXSV/7_$ U 6 DUlz:& S[_ W-wWb :D <1oc7[ *pH<"!}i%+o>xPxnk JҾm"WK$_a惁[8xhIǥ_xMY}Qqeq7?:|IC2i ~@{^&νg=ohRh[d]DpÏWVEȆ)ڥNPƻ*ءgBDZTrԏh!Ԩ߽_|suMll;ːX#b.mgC)8)Me@B9 -^BRN峓,TW[sRW( @`фGI뢓@à(N«)aB?#L^$,{vetJ^.#8"i"N`J仯8)RލLT_%b@T-J| UŔHHUc5WJ=W)F ^+::+g+PL|aqʰX}?nq\Ď/,G ,6;y̛ˋoqjR+7o|x~J0kJ'ſߝa>,X?=I5c6@r \ebSR6fA@#{B*]mӓRkR*@QiGTD&n]IC^| wD+SO$u^UaʒB&L`+D L% ,K# #3RXjUYj9m¡uzU!UiGK)#eeCQU Vޕ9--')LXf9 @JF촴pBq~?7cOϿ,W3U5QjN>ǧ׼ ̿}wWAvV{su& Š'oOf~OןޝbsiJ><\_磀d h WoϮ %Z=%ȓG_U^)SOh${ZmleISjӺNlrCb^)kD5=TAp%c!Š(Zcbp ;8 Fi,Nj5c(LlP8P0ʢTU@yY XyxYCDG0-*AR'iZc R'O!ӹU&9?J\mݔmv맼aK~ HDm²'TEH&RBJeU(4Vt18Zp-NԞ<nbPF 3@MhRLaˤc<Z*iOjU6-/dVdDŽ>JwMUpJ!r[Ȉ(&{ˠ7N/VO@n($&Rp-V[*.D\ 9""z:]>?a ?"zjRװOWЧ\N#010q"g%Mz~]_\$_(50)$8jR |qJ٪_rպv˲_D1PIW:~zf)X?AgYަ&DHgn?̗IF1  )LBD#eu7S5n/ ۇLXߺU$UI0tMzZ"+t4FM1hV#h~4&P->3I[j3DC>+#/ʐTm$I+Y%TU6%)0&4In+ʫŐ)Cv4#5Tk`zVt\˘k!1*C6(Âv黅 tfM02jDE 1:9p?l|ń^KWDu_xԮ ѫ^cZQ_v |rvJBv'WFR SFf@kd Wij8XNg!O6{m2ɋ~kW[WY[M&'V0Eh,6DìpM uJ&lj HxZ4INrqym u53zb 1SJ5ܺMYjfش@Ӌbӻnd$rWy [EĀM A+ΧCWJE|d؈ - \׻P-\߻@yB);= KJ0V=lO vr@ dXѝ܄ vַ|K)Z+ 1É6aYy׊rkͺ>jdAn{XٞA"m"{O [-س;tB}ҩe|b^]րF!q`S͏-Y-9acVg_oϏ}4l 28ֶ[pq#pqCECM/h3#uG `9|ȳWLSTkS)֘5ZQ4Vթ'JqR3LD9Ayi3Ejx׽T &b]K 6ۇLۗZ˔o(&^ <19k^=HN8Yy_}O|Mc%?^~ˎkzsQiz}q:o|uuyAOůktӯ9Yra?SwO_@T<}@^ߓz~iWxgs~j6ּVKȫǕ8Ș|C2Zm~ߟns\5KЅι(/6zgò 6^ z7ӶC;ACI/ ЪbQIa(<`Of7]W5m%7y?^;> "wUA.>fFybDzW'V]d>2/z)[)iD;>cVc _$/bCkXS(\ `e,GF(>Z`VtAN@;5(oPO?~Kvszdp^gO7\FGe0k|(;]wc^,ofuvf^X2+_ C0l A`Aw Cۈ5켺 xup-~ ^x(Ю,%Ruڻ%' O!^7'f0P IvІ|QhCXӟD/ Z TE=>"2 eKW|?Omǔvcy^Na:e /~1?H_Q\z䅬4R=U͋D;1XՑ1^*Kr,v`-^_4K]uC;u[6:R5M5_th"cWrԑ\9qpZ;z*8> *c Ƙb<lG -]6JԮ;@ ~HVpPQĦ۞ B)ppYYAkbYeH0ƣA_Mu9eox }8 DѻXS5W%-YwN0UڧHhU,{U]@sXV0j F ܆(Q'GQk 8\P1Db К'\+0޺r3]gs!c+ ')IKhލ@E} K{im;%o玲(f*Drc{L!TIP a"1.8ʐB%MB\䜯0dx+s`70"܆ߔ26Y|U[^{Z+$,(u]<aQ+Sc]ɚ(+W/ͤ6ʚVW%BR\StVjU 3c{"34Ocͯ_ߖcV$*@A:;PnsLZ$wsjְ^5e*?Vwn3:oLobj1VΈje{{n9 y=?׉+ϫѧ3,vumR1}z ĿLgj_rӛK189r*$׫rd**N$KˤЮΨr-\nmxÁuwtu(i1=; c@v(۴J {-YvKmU; UZ lʉFQFCXY(8tclHڋRq +۽7i){L.ڜ@IF!o N=N7PE CkN][č-' 5z"fuZ{2ᤩ`t2/j>EQ Q\ f)HGf0IʸMϞOӒFыd>W9;rGb|0r/lddbi' 6lf?ǿ:Ѧ9T'w'ٲȍ=dtn﷢V82rBp'IMrZ^h>ѓJW+odԠ SPug9Q[}sjE-my$b,!"Z,QĄGf.y\@u\탢aChpb ҾYgQ (5}\ u+`:)#yA-aH9H}prVΘ@~u9yr%A$S'50ykދKJ[!$XP΅$ypA0L>S#:H ;^5#Xֹio PN}X^_ b #xy; Hk Z-1{U1TEdŝ  9|,:RՅF0iNIdXJGJ U:z?Arc!tl*G|ctf6o>b6VmIf=Co^ʋ.qxJfd)FeCv%{Cf3LU\IW}W1Z uS0Eq6g|8zs]b1 NƱ9S0Vne^4VZ!5R\q\})/ eOHQI4bcY*. z5Rq#}(&FF>MVʉ`f|( ^;r]* 'Ԃ\IuBNkޚz(Lhaw)y$y)zmETLr˺`nZߒbx ?{w FSP7ͳD )j@l@q˳]maE<ۙMJq+`{b"(<;ݵ/Ue'ݦŢiEYn ڜaZ[%,b5"@"3. 
"rbU%E #-*0Lu'6-h62FzvhlPn .d2:+6ȶAs'fj@Fzh,A|L4|5Ԃ|PfQL*>${a8`Wx׽T0W XdF4-Zr{s7D|#^sk8Yy_MFwxYKכ4/=9'?^~!-O6E!mJqR7.RbV?Q#@ZQI E_ȝGWS8]P@vs{Y|.~慧3\Px!,a~$'$0yP6 d#<ևPMr.[ѧDc~*7ҽ;قdp2iJ4Z=Iݦ61m)u+"nW_@>b.R4_0ڸue']CɎe<:/:qsfLϒni(%<8n^ӻCx 韠?"r]Ab{\|mV!$Qȡ\LIO2K_yt]6kZݘ݃"T_F|HLHUfWibS=$G͓kUX|rxJ2yeY0M?>S*()֩dK `\ G;D `\-){Z_^` w4n.Kw17^" 9 +Z!OTʅi ANmqNk3ʵ۶)زIp(mr-q&X.96ѢE%2҂Vt9[۰V񨕉 M>A[/.]{"-p]¦&ɑktWMzjfDr.a y+-5uu_)zf%: nyěkC0a؍ja?`7axxcdj?*6r+G諭@`s^b°ҟO)&e2XY$0@ THЂ"pL9ΠM&\BGX<o ʅLi#V) Y8O&%&3MNBbq, El+E=ױ{^5.Ոc"B6٩ ɒ $ZU[RBMÏ+=ER0֔* lY` OJаJ Gć OR,^P/bL^-}tmP/ 9ê#ܤ@G'#Bp]\pG&_}]F5c yjfU_u[΅XfXP-p,Dfg2r{.͟҂r~V+ ~ʔhv9NZm"A0 7;:Wh}l2g_kP{E[;vZʥm ݀͵bэh{3 5- ݱn.LYYCYFhY{g9L%ÕkPJ|Ee6Fy|J=Na"'o | Hɣ.E5m?vgzJ6o FpjvD+0*ml8~ߠJ![8D[2өqF>ǯ# 9cJ%nB?:aC;sGu Kg5[N_T#!-JH}&s}Q:-r.CϩOI??ҿLY1brg&wMK v+Up2f!C6[QB `<:р9߲};y}ua~mѷEŚ 0 uyodv=sr.^%>;_ҤB[t͂"rma[;Gn\vnCjjT^` PYR(5S2j2>8'=*ELlQQCjc gC) 1iEU*!(OZCSɁs l+&+8w5mG!rZSvb>$rT[W+SZl: ; 2;\ ƛ|eiuW8jzn(5iipSOt*/LbHWo5xî]54*1fȅϩW7v d?O>xMP$Wy<S9)2 @xsrW}|y۽MHsۏ),trWn\F/p{o\{ɏ7_.dǿz,i[5pp{݇z@>0mŴHlrrI 2}\ҧ~t嫣i =``}&G]Ǐ ŋv^-Qނpkyo62vᚬ*ޘB")e%&*5df() 2w[y/@,rRHǴ YEj>iiARGQ h]FÔ<-Z '{|Hݗ_Sr@z|ZK ¡JO~&JD0LL~䗟_LV?=ZPb/ߞM< ̒po< N ŵ̩2Tr}2pO7VyMձ S%aRѠZw|SMmJ47NK =ZdnxI1lm d ̍dn2߮A_ ,KUhdMsfMR&F"(n7-HX^UUAMk#IS6s#2Jnn͂a2|iVMo#ěX*u44#sgԨ(,Crw{#&5w9L"$[c;ugB^R-%oΖH/4*/Uk.kfA\-)̊g,&]QW?6i6;=2*YeC*:WE[鍽_VZ]ܫbcG*-u;@ /RMۑ*ǹ3MNՇ}S齸'/N BlPwk㳹eErbW^lIoh'/>m/ʫW{w%Q.A AъAML074C ǨЬ0{JA4A^1$wt*EL;`]i,ֱc^x*k8J3lB:;`iDqw-}| 2fU4P$7/w;(~ e[<am[qicz'i#(6*WG |%g m yOi?:7U$Z [ &^J1&d%Cj}9O>W(w}11 o jdJu6֪qW!_[z0:\#CL0 U(1}w6*]^"1U>Y#ŠÛY\u5n뽑P> `d 3N GZ6_QZ5_YhT6 k)SYM4B\5p¾z vHRm2*l+P=BJw5҉ıR㱋Rd|TUx!vHRӷH%"v@0&c4.:PDl :P#xxƎoS`(h&hiCkB%=_ [y }B^.p<|vݛ6@ЦƂU/+-T(:lQyfLG S[rhY P"HȜ:6r*9gJ8BKÀbgxGxF6c]O'ﯚsLdHgU]]]Zmy ʧ'Gw]wFLXK7_vIqen#w;|oq z%cF՘)ѯ5UӏކWnի^!uW{1M^gҮ={ư?r5(ՐϞu5:ҐӮնstdg}-P^3/a{WO~FvAJޓ Zjb{&Z#ZPT]KBrJ:_yeL%D*YQéLV y[\&ֶF:3VOIJV1 W>cۀlM -5u'h>c^fȎ;vޒd9~xLmR\A]y vw5du@yុc tHʟ(Ԁ C3ϱ=P(嗉^¸B(n D?f12H)PH4a\At9p19'[Q*20rNG|V#>frǙ@|ZNk4\G LH+ sT-|X眴AhCy VQ3_ ')sK*u(tTdrºa=XǞ!vj{[9.6a!p.ogne1a =[.O{v_]b2T+]f_@wC !^1dܫ^yA^|э $ァ~<͈۬W!dŸ'P:y\V81B%[ikD2"L 9]*/ƨx"T6xD!|f(%kΜ&$:l >Zw$tOcK ]j)Sj^fO *LTDuF5A+me`]wal2x-Ϣȑj.AT$ULL@ B/ö1ajm橄JtD[l;- }>(@sEEˬ' Xedr60+ O+Tln*.^xO 'x>Dl 1HF]x2GT2VJR;/|!2<(f~ה8"x ;J5,Exoy1C;8"76'-Lf'-q Ί%| %!Bװ!f2: ylQr.Ч X5: j'Δ<6>5:j3ôRd5uz[Z8DbR5UҠK(8L>NFo@()L%,JٖCNEkoFO6YJ V;'ևbx.e]d@K89ѓL|mfa~ן]ٛjrXHTm&t?޷VSSm-*oW0_YĻi(^_F%Zg?oVg S8_]E 5{,4W726^盇8tv4Z(V%sk&~L\fw9&dȧcb7|l_j!=題8r БEL Ž^'k7U+w@i&,ZR\ڭSݺ?Ȕ*)5D+uq>ųw;YPjTh:rA\(N5IU,cD4VZTsVO{hDuW{kwq Km8 ńJ"X L-lJ9-9bW~s29W=hoC5.\)Ja[M$ Ql\ I2h5hAt"TD33&؞X .A0-h  6DeDG([)T+kExIW&Qbt[+G+""0`% ǔ!L"HFSM'Z+(֜ B(ӠvN T"Eʕ<ܬ᧴y=Ng.kٔ~zwWWx}&pm")c _r Řq /v?~W|7T+IPys)C{X|[(}r%&̡]vem.ye?&`*ږgYx3va/wnuuTlP?Wr3FO& h#؃n@?׈;Rc@q4Ksi>5-E- *>m@2C=C?WZ2)4N<7l˭Ϳk2yu#I꟧bU1b1 &s̻K 7.eF`[e7[ag%l*hUh`}#s-k|:pe 2bm-(b#*d^Z1 % .>f^+{UYqՃ3s35mQ& yë5f2(PUndl åo:'8O3ֆ^!ѴsyԎ ùT)D GđPvҤ=!hrq{3-Z awR1e3΄ת!ȭٲ28^88>6huч)~1js],7gIcE K{;8\i\ U q\ h(XK5<}:X;Fն4cԑ vJ[?QBdwc;]A*堦[ WdAuN~C~\"VZe4rNYU[W9*u:p5 U" '/2IEI[J0U;*I5!ѿtBmP\n~S`怩7? 
"lyơ" "+m#IEb[Tc`gwyE#OKm҈T$)xŪ1RMuD~!x=_ƁМɮ6kS-W ̀3S+-߯A*z3a$ VB;^"~ }\}٬ٿ\ q-~@Տ.'H)7F^Rג]م> W 1D\L.,rAK΋E*`}8+ bK`ߟ/B~czzu[HCE-d 2ddLBVU~ &D)T..da'N%Y(DPߛrh-o[wcA9~ ѻz.Ժ֊bf 0FvGkyݱnk CE3i>BQqd AK2 CZn:.;#7edBXYϷÄ1p^׀Q#=͓yzkKx(0[laOPr"iAݖόt])u$~w92{ \OgiVq0{3ے&>.H1N3]c}%#F1#J9ʔ&N.ݭazyq}*}ߝ+{}6wgkEƸ>}e*g>L>zŕڰqTɃ@>jܯk>_ (@@˨BjpdgүjE:t(BR]%0'v'b&/m5oG&#V HlOJ9=rɤZy]]/6z@M^jto$|wѮl0gb0Y'}ˀu]DiJ15I>`mH* 3 B%/x+rDlR P*Uqٞi՘Sf pbw|cixO!.AZ&ۛtO?@ Pmi84^2i+O;NEO5 NV+\hj'i4}F |g>Q}&l/GSh@z?\hNo/!5gwa+dk~-s =Ux[pEuӛ ] ptxurf{Toq ?(Oe g\V̠ z,S^+rFs,Lx}>ӊټF[7n\Rnpz ^9ėv78࿺8*W"RǼQliLV Yi0NiJqE<,xaZ4l8Rf_FN`1[So5ZۉY"4NbE".5'yH)#6q:{{҅+]5234Q0v^+Xv'^2)qԫ}vѩ׺*!ƥ0^#I覐t3L2 :h]N&0Aά(@*'ɳw0L_ MRmigx7֧٨sz[`AY9$w<(z~>( M,HK "aM0RjB(X$ᵳb]z-i3 {-A?Xh}uJLBG& aN+T)hGxMO?>_['G'Ҳ\h23BN#3>PIˊ9)B\rԃ\81$5os3)Uycd|wMvH Ms"⢚X\X-8bLJ1PKCBԁt$z7H"(mG]3,nYxѨfs(^"VSMҽUV7K]y:CA iZ-}J1؀K2Th/L Ivyjl5.կvZvup!&ւ鷛 dQ>֖?7w߫puP$"B tN^q(*BX;vz퀂ׯLC; R}iQ xcc}^zG1vUmJuVmVo_.R{$z}}^=(9)'|ҍo+ǿEVm}Ќc)rGEg@ MV$MR<(A P:@4Լq_m]-AxfogiA?1kʩ$8rk#%zn@$9ɍW.)GIZ@e|.FHNN OB+^@' >*Pk93`ɼ4[A533& #- 3P†S "D RȀ;äDW<=tvyakj3 jfw9 g8cWiiNC\/n?O% ZɈWѾe6ԙtܿw+D0`F2:+]O/Mܥ+^o^Mu*w~ dl獐Ěg$TP!$W:$A5+(<щ띲gJ5BRʵ#тFM]76@w ъpcIAU6 :]OK~_ix[<4Mz~8Q%bcU(rO9W>^*Ė *=oZn6QzA#A#k@&,a8h?~oކ5/ _]s,8#ZvFgBxO}~|50;ǫ7{WvaTͤ>G#6_wn&%>/GE7ٴKn#g?_ 5dx8O'ݛY#>aV >ϝ.>0Lgp65֊*jǽoBՊXjkߨ/]獵3@w_nƿJnxCN9gT6qf?>qL NMA[{ bnMGMw3p3=EMprhv["yҽ3r /qq~Y,,͎jd{_bX{TyUf|<mm9:=.>u$pbPL }?gm)%-5(Tub`C7Z=^|P LГ, tBȓD-8|"&-}K<$}L& { |@xIuZAG8ݱ'3F.WY\fdbs陋Rڟy@OELZ5Fi[W JD;híSloڭSW.;zvh(^:Y͹0d_ aBt* ˀ#j=>2"KvOb3F؋n!t Vpv4!BkMB}̿_ Ê}k[yK # ڂw(@iUȔ:M!3%&T*pA|nG̫ Q>AC3ed<8.Td'9SgHκ+%0B8iy vDžWk6ufgʝdd`;7zŬ_j:yvuqIQD*%|k1|`|-y.p(%72x<e^L5gbr}{h%Zf56" H%XdR.^'w6Vm6 p nQR^ -c4E J~6RӐ~5ʴ`|`j_F*v8t%l8 ^4~7ኋG( P*/^E| j#zNT/Q</w7Cs_o\m˸sy.<& S,͌#2S6 ނw2$q:uu) sM6u^A݁˒Uqmu,"y8=Ế~ ]yg{F 1'3e^FoxY,Nd6KVyʨ3IgLHJOQ֧!PR]}6l;*-QFqfQU`#f95z *0%-..F`E^=MiMijI5ɴG!pc)`$$b] I.*T)*,d^3)WeI3 ڌ@ƃBWRLdN:d f[27t:|5RB0.&x9wg^BIJO;D+*X]LSSKxMeY%"˺.,%ᄜY]'BRUEuu!sRӆi%9PY-83ARt"3^{*TCPJF&Ղy ul(N/!@(_Ph+1 Y(cg.)ShՁ \ʼnʸ^2S=!8/ԗbe4K]щ9uq,4j͹%SPnl|V2LXeST+BK2Ћ#Ib შ%Qx&QT9χvAfw4!>fGel6Ɖ qFcF0Dz@DJaYJ̿y3a+^kq@󭆻X^=/񿭖H\CW}|Vw]v$|fo肏Ds1{x {!9{,<+{|u@¯o/0;dӏl)Q$8޺y/ݲ*5tS ah7۱ )Ag%HUPbJŖE%@}z/4\e4-Ԍ}4PjJ#z^%Ed 0<\#N2R-[Y"ݾ>20$A%JRRJBb4$*e8־ ޒb %Rul"h" *_?]; luΑFA֝3Ǽ%}-3Y$\ѽL#5呣DcqΠ(12 쭉-D9R75QVD92i~Hez]E!q> 3dYc6\cЗ1L-jŭQU7>yoǘ"/vKnq1&,^-gg?\+Ϯ,btl'"oK< ~Txn{,|&xQG!?wu퇜1 "ASI$O EXuVW+&ekDf4XPR  c&՞Wd(CGɛ/?~ԠHn@،=҉qn7P0aKi>6ө3&j,KubՉq<iDz8}Z5Lӵv'wljp Cp PƐ[&.t]>h4,1),'d*2d'[G1J][qѲC\F)y @HtedO}C1T!;ˡv1IdzL&TqWN7S;o1]FІ9\;(U>C۫S< SyBkε8"R/f^H-^k,kԓR-~WDaRC:DjtP[vZ">ޟhViک#Jja^?y Wlؕ)^L%xnL*w qfKV q}N`Y% '\Y 犆0e}0>10gV޾IFd '& $Jf*Q*x)5EHi_n[a{zߙtega&#}ݫMM"Te獞]xs#{L=b *FJh]WA(ں_9iG`~!e/࿀5DGH[9W;Lk ~i 4L&VO*}|qqB,T]T*pum췄>n>+)>/m+IT>e-HJ)9}_ 0m=e& d烇H4ۇ7?翿 ]uLoyXoƾoFZL蝳UD^TH6%GKQ\GWݳvv6նx:?۟38>6+| 7IRF2Ƀ >sҝ8;ϰ `t+f7Dk;V;(4}_i"+4E8N+0㖔! 1Kd )V~]Ax&mǏ rUD^&.!Cm[l'Z.cء2vh,vhY=7!d:D1/s\Zy(Pʘpi\ D6kmu:fwKB>ӿ-1A?1?X}œG?^rP&ʼn_?|曯W:beg/R{?q_v3V G07) \:Kv0hI`z``7dtW (\Jfq=j)*x~2!ө3uM:Z42n+< e!g^ +l 0KfdR1kl \yū4\PB qhsZSjUJO*SFPLcBN'ʀf`ӏ˫NBalj*L&OZWZ녨1#hAA a?iz2ޞ7>~sJB2銜S ;5Ag&я^|'-"̖㿽 ~ X9+W=FNtl#c4!_~LT,8' Ti䃝=koGЗݡa@/$@E)$ew~CR!g(K#aOu][G[4JBS|.]L/n̯& 9ݥ0 EE]'覯v,J 8r*yrh-翸O ,SXΙk2w;sef>bց=7 ?)j47ahತo?o]}~&jp97%!,mt$; TŠۤѯ ߌǣ]! K$"d[B`f,(gVĭ]JcV?L&Vخj)mZ / W|UhlI%)mˬ3Q!w@Vg)^׿K|=ԓ]UZy5;_ncQ4 @~Q7lh)^4N:|O("OI R`:0Xdps郇ji/^/nl`FČJr.E09G8rXdjz{僃8ށr8KZ(hy!3+7tm(A/\hfYXV·=Z"4 Zt[yz+!/&C&Z$!'_`T0DJy 3.< J1)c^0-6CEqِVtf! Pg/TYؖrݸ0V9dsbu0d+ V0'RK4jKk. 
cAͳ0[L6ڃ, Bd `i{Wmϵ)B5 {Vv}ߝ~Q= >I۴.+W)5k ZBL:#z4DA)Le$ eR_~6o-]9C-,]9.ЂI $^H rxq$ar#O7e$Pʕ<X.c#0F C!L"C`N*EIȤLwAe3#Bd$^B(w2 YIhK1BЧ^O3q!׷S'܀1pD+ed0y;3i\ReHɣRI]hR&;R!I6eKƩ,8l(@i. b\MBurjHKNyPn Z|S2פ:+0wB~]`%fRMiJ,5K>DGǤ'`)]HD܇QWO.\M_F-}M5<+n4f~NN|ts~2&W?;{=#7`Ǟ0 qndfVZ\ ez^Ί}\s-Q g+4%>G,`'K BV%yab3/Rv4%9{ t:@o7.>B[`POo&sR)T:gP7Gsv iGpWTZy2a:}Y8A$Ѭ|T1%ۜˉM_\^M>U1(_z:DΔ43!Ǭ640]j>]樍8ԼZ= P'A]$<{@EX\4'"0/`6{{FŪ(>y{Xe$٫k%:Xg)3F|eTuʑ\r\܀Yaρ6o>+RVI@b\۬FS92+%ͳXY!\ESv@a 9mؼz|Y[Ka r 9eo }l^qwpR%0 %(3P 6k) l’h5 i۴5C'CrB,H5TG = d3D/#;^5z>%SCy["7V7l'f^ Qw)MGsIO?/M _﯈ P(T?J>tXw5$06ߏ\?x{U=)XWy#/'i]?y#[/>y+n㛃/gpVOT=ӛIt}c'p+!|Ngn|꘶G{ř곻ş64zލ=7P-ىP<T%F^ڣzj/~k8Sq r/_V_ q0"GLJ{GK0dY&r/wm-dX#[Sfhm 6jF0rNm2y!G1+yBSfVA2ya+iY:`db>aFHTRf{*S*rh hRV1K'|oV/ϲ=Z!JgPsIpV>B0LĒD VyL ݔillh%Jf@;2s2: ZeL9()+ )q[[6+# I` [ 9MbRF4@HV6 Qj-~+ YT3ĵqزŰW4}pX{dwnn._by`@1mkn|>V'rIi޺ݼ09%+փH8|5zw- pbaFlrȐ yc2-=fK0s^Ӷ<8dҐ9~}Ԑ[8r3?6c6."F>M.sra_S;H>bRg6V#ĶS;wܦvi<ϣy''W|fIHXBd 他\ '8[HF!IkыLAc#UzFMz"#G~H^½zN4{DCFrͰY.*o#+(~6+p5< EP{)| =c^&\ٟ-<i[6)*k6e+(ryQES.YXg0&ZEm*+RE#* 9Sd^(&k[n!X6^W JMb7"~17?ͻ:F71CƌIm4F^`cD|VF<`Fer aB"/7f훤 ,Imz wߺífWYF(NQnH(4p8v !j-c8~J'2'6#O1rIX!AZ$=!O,d^݁>t56k?FS[gy/8$%?]b0@.ͭR/96h)}hOV{1/4΁|rWMOU$ zFcxcB [vQ2Z$iL|[ " :s"Z$8m5FzTK:Ëd4i׼|RK'ؔDUFI1ֆ2XrbڡR@E0 `TL%0ji(;&ݿ8'Eiq@:YЏ !og/!#'tvָ5S@#U&tN+WJJ 6#h_FloEQ<'ɶXƊ(,΍aZ=/Ņ=VDB$J.7=ʂ*x f%2-r9 %Ns,eԼRTՊ0Bb'ѕE\M UK6'_q3DR$#y1ĕQCDM9cጳRZi[fURPOkI*m*>(1*S`J6v@% }xJEI#3)B" =xvC9\X/L&U%aH83JPAX^} xuzX;RFKﭧ{+J%$WϏr 274RG 0{>]^ -w3?ճh.do@ )-)?r crIamPxFܯm:`;]O# ?^TӎW8'-QN{ӓ?]^nGɇg?n?,_ogW{~O: *L;9Z01P/Z.֌V=z_4zJmҫd.|zcxjM~sUò;/ct.|=sოV %Cr=GPB:gAPztp OO)UZpfYWyܫS bS |i 'gY(l1yz 'gAf}C'`4"Qp1ɎU_W"Ҟ)^u'ܤcٟ%m`]do_ũC|()zg+I~K/?L8V MZ!dU0 @jNڑ|ɉu;v!\RUځݟLĕ窘j2rU*fJ$@cvrlrj0g' =4hǙ\ԤB)w}h& 2/WER($ObQD3<:dWi+ejGtCE}:n+nP|v31Y@xo|L@ fv;tl6o9Y'O'qz De &3JTׂdKC!Ǡ1.YIzQ*)Snjc%S]ݞ6qy=<6op?(5_L{A,iI YblEEçc77g//0ɀK.ՕYu9:MX LQ50Q8=ZZ+i1Χ&3_Ƣ1M~9^pn7J}7G/pbjogNcgqϓp7`3q[$}8)j%W(:,. ~ok'tF &F(96 vox6Fz^k$vk 2 tUktFl;pg'[}ţMa{lK{FJFFZtb[_m ^VMc ~qwKћ5j GOQ WYB Cxc:%i@,}xyq{0 ˡm 'F;6vmicM6l8& BQkpirY9h.#S1l6X띗lإ*b.}DCJ6n懥$BĶhb/S"i[,{OW;q,~\}d?_\o9"9i\mWX/-7 /Rwzc L2 Y896z۫՗ `0r41=JoU\pWgէ_ǡNyշ_ ^ c: ۪fYS9rbDo~x)oh|b:31~?AX]Ij5sAE͊aNrCkKIԺTZP*D}30S(>QtŖ$_"LEY`$A㻌 㕭t cu|& 5ǗTrN*d>5J\O12{^^6;E{HR!qG245\fmbAL4wzJmnG4Kf.eL_z. <ȿ- =l8ϜΠmEÆvgٛׯt[v0AhŅE yu:m)1 p% cWqy1wB^u|;o7ߡ˂P"]]nƨ!A,tlӍ ΁V5z";V+⯴`Ep}ݏqdm%OpoL߶|zp-#3q̿ŸU m4V \u/7h'8B Bdj4.a'JGAm&QW99Ec52.?f  wݴ*Cјw; ϡC͍P+%=ײV4@Cb-P,ЩA+5⃥\1I]Cp!R#C^$#h yoQ*B#JYQ0@R[KkCuUIBK.p>_}U!q#9~[j5Z"Iq]T߯.ȾWnT ߞl nTnݞ%ejL8]6c۫7wuzwrپA7do,,~!|4^uʉ|v.itc߻oqLjb,s/>yCˋ,'7"bb+wcD»bb:nNjP݊ݺDlJ#]~n$N7wػw+hwB~rݲ"PjHqIC&)oh+6I4 EknZ*&a̬#T{t@)WA>(15<Σqi="=UbTkrUSZ*)T]eiԀ[VnnNe)-WܻjȈ\rОLz#*4Cn*\XnRIy;<,pWj&Lj{ńu,=H\# J) DKl逮9%} *fD^L9fa=\Qhx̓@K zv2 C6nfwM XqpGW28Xz!6$X1"dNqp*S^1=GFͿS~d:?;uA9K& 4:=V0cBESZ㘣 e8!BOfį N )Pt`ԼRT TDjjPJ1GxNk+4RKΏ|;)b=!@MfB2XNu$U.X^,PςhwKȎ-'r3>4Au:rZ P:rZґ$T(h{6L%u!;i$ iPknO]x{3umkuƽXǷYjXcA8KN|u_2O~c@~ssb81ہJc;yhD)@RĮZ(3QH)\S)Cأ.X*yQa.mc';/ XQu^vd-GԼF-kT!ߣ #]$;: bMp{Vb#ZGD#[x4ǝw#NT = ;fQw2t>nuO%JKt?Pp:'uRS0Ccˍؗ522\be&3bH's2Q 9leNĞɏɓT61Ҁ(yAJ(\ME+j &N:JXw'3pBO'uъ؊AU?Nց$CM8 IwpJlԔ <3]p쫝p~NO._ŷϐSje'iigmK׆}WSD &kb+&A*hvr8߮~^\^\<2qMYeF_";C _hs53u% Lgc&\TcjCl[J5%kQiuM5E-:B sNmDl4U ؾ|ϯ| 3&5TS9/n=@OnU0k JujIYvA(! 
5ڋ3c> 325@85.3eRq '̚Zt1AjR`&`*\o@\ޕ6rЗHHWw~!k}f]%;ɃWMRz8!)v%{U]]]!5#l n1ynwFRrq]j4 P2weղdWARWg TTk L44D\2tʁ0a<@"F9lB)PӴ_y@d'fRR@m:  Π+P\bz}{^dtơξ+M9ܗY4Ng⋓)*_/Z,mng3 X'闋[z~0"Ky8ExN8 1 ٔ<2<+ňM !$v"M Y/YL;EDL^`̧]]hݩ֒)}*>{C+D1h f\a/,mQZc7^ӆΰ`emd\ i(p:Jn2@@:= b e#B*'ВaldF8afyIZ 9/Y!r𭟥rϲO»iP}lU]UA,AVmFZA@TuNPҥB䰣b:JI$;&s!L&.UJ;PKNdnP1`{Myv;OwK"34ě`YoyCiPԺ*a܊#lZsLJp$}]K8JLjT_w Ƹ -qZkIrݐsؾXLJ]~U߮~8ZUozbQ^Tzx[WTlqpqʭ'ӫ-ɄMuӃsq3=;iQrRjO=އcYޱ2ń~BGšUk`;֞kk$ ?i>~8LGfAzmBi7{ rdW8taÅ]wU;m zWpi'{Ä2ƏxC xz$xsϽl0෵-%mmRk]Ҋ1S˥]iJ}6C{5-:uv1.ήf5N0g77ڃ\I6p7Z.Ӊ'@"][gl(vYoT\ϭ?S8LҀF窐=yFIJͭRQ)aTn;҆R+|m,r0öC^:nR+QVc/&MM55n jUXsTLzHfM#)}ʎ} IT0ӏ1`.ꪐ|+{W.gPpO_ԂWZ[{W l-^tkӛ~B0'3p+4w{m}L^A}/C-,͔T:2:%5 _zF60}UF+4|shsԓ*zdn~?uaK׈ c\Aj,2J{TFɤ:bLrVe4+mRoC&'h}aYl9 ֙}dAw虗@m(>x(6)xoU{q{~LL4 Ѧty 0.b,Fֲ6ǡ+%/ K^YٔOA (Bm ϴlU=dl+]"kzKdg~k>jֻGCR:k3X[fdBx'-"@ A 7lywR jVT"ykgt ŕAdi7q+iwBK,eR%h髳g6,f:=?kنF mQUVh}0 $By.[e@O)y?QAEՉdɿ r켾u6_grF&ru}uܡ, DvRtDrɄ$zm<3+--׉a`i0k#70Jn>iP!P4՞-wrBV~7(PY?ѧl?cyL Iu)Fjô]\O᥻p4@4-s-0S"kٖ7ڸD:1JAG6!8mX֓mTWBDɂp,LPcr[#Ң_>X `/c9j VbcY'з+؟ ?Rk?ӪX8a)d@\?0P7h;ģM}z1VcikO9R׫W%a{+7E%bB? DArP8Ye!!'jQ okd*]"f ^fٔ$^8i,0Vu)IbT\P&FUJ*ZxJ )WL\0iW2ʮJ 4Wpܔ!9h̖TwSp.Sisn]j]#Vlq#&Ԓ-@K^JZh7fb Mm|hbu< DbU\nThɘի2DrYIнdW创΋lk|Ku:.%ջ%N(vq!2-33CZH@cN,_!\ c~]\jɚ҇8O, WV]Q[+N8C*]o(-I_SvF"μ-w%&<Ar'&+ ;"0ҵ)]Mh>Q Toł7(tɹ9뢗Iq2KjGf{gJeߟ%:iԻ_ m.ԑ "D8c,jʳ 8S9}DzUPBb'PZpe,Hh7/! 6+Àmwyu-Rzz;`4G͙{ .4c>D9e0kݙaĕ6ɜ k&y 4kDUQ͚g>k3TWsFC.XwXi: *|Xos2XGM:w@"؂5JՌy-em_Ī-#9>Vkn}mB^!>5uo9\"u~hXsq kqB@c+ jYW䜐M%ܜ3w /ICȟӗ_F^"~s| miN_Mbr&srqHܞA_}ʱPQ֒sZT6CD 9JYQ_{t2qe[w/"igd fo|o| vEDjhKWHIS:GxYϡŢ [\¨JS1I0妴V UKl)諥d:YI2uIf1*PE ` 㾤 |OGЫB?3Bǟ3|:dal&?;]"e[zFHn po!҂BbFu禳Vw + |WI!yᄍϗ b߿^o?}~ t:Z2`D;bm,)$2J%|!mp43jy0V@Ksa+ZXFHB^!fG \i+TEOɨqDmz90Vy[ =XWs+W]pKK gi@5JIJUXF`f#qFO*I,%>VlY-V$9 sXI)qFr)BYf!p,RKjOqkDkɂ 5m`S}7hn>N{# r߿. d0cDn[dvzDKgb̜!HRn) Wof5t^^z* TT̲T Í"@Bd_SXc1P& Oj%#*Q,sHx1 9(( ggFfPD-Zk#YB j6|߯`e]@!COoLF?)ܛ3Q;D?>|"pm?Wt۝ ئ>݊’ftӦA;*3̨ ¨F줴Qzg2"9 !aR@Yq:Xpi2hA'pJk L f+l*.%V-{wOۀ$'{UCڔ-VfN8EBʠ"Q*Ҙ.r[ l^w6e"0ň/F=HaUD/plqpˈn 9ccp`a>i]uitV{}Iq^0X.aʓ)|s<VC]/ ԉ}v0wXS^Tp)(#R`ބB3C$͘@"K؏>Lx Lr I)wU_t090)?)Xox߅K:=_+ 8}43;4^hI)N NCN= yC3opR1bԿ,l$Cf|,s$L@x V`GLj3Jq*¤l2WI-b!WbJXP%tt.Ai< b%Q%4 O Z%Ė@Փ{Qզ,W!@wVlm=[Oصu[FT|VĘd\60`4! "d9 Rq)( 9zF 0L5/`ĵRRW腐 }%C8L4Xkͻ{K[R" R7/ uO`w+EBR TS`'#\ \U! /ǵ-L)2Ŋ@ ay0hҦ{e'PQZ]#==sr!!xd;͌P#xE A,::҆:~#N6&k.&@KF2 '$OvQ-^f~8!B~ƛDqB)'ZߒrR]ƶz \Ul[?[yim" B]Cu h MO`a)w5\o}জlLgx{*ȸ輿yf*o;Ѿ_-Z74-Ӳ#]+k+?dYmFŘ7:2FGagZymL}|pf{&)Ɛlإ>he4Yc uM?F|HGȁl!]:Ŧg0-$:pc~5osυf|[ +pġ1=CWFaVY9@`I8(˻R;S3XNG.>pj9M>8'7qFH9ϽO}˯m'kV"Vuic:B6y#uy젛*MO7ȉ0%6JD"Ppp6 O) x- t,Ρ9@aZR= 簠*TcZTb\{*5=%k-hŒ O /;Mj_J^?<'5ϥ̗E{o&IT;D4 ]m^*4\;Lb؟jR#/_LXl$]W?R/ "ݫp-h9~ϕ&!vuQI7 8lՈ<( ew  D ^e cm1 !+l{-NՒ.dc0 ܄vr0!Մ3vFoVqQ;Zdr`"}tRXfhFwk]ɩlc?N^@z,jw_e婹5Xo)JaӍ ́F;qB[\譴TDZPB(3Sd`}%9 [P  /'4WR E'q߬Lur*AQ3ߔ(dFL./3@!ՐDW/l$OCC1M6ژy|L)wϐ ^]a<9hR¸}VR7wZ0nz7736y =#4m_օ{ZJ>V^>.V[dtzxWFf|8EgzR;|sn-_} n)T/[2Q],s6=j]hx/j(sn'2 +F{֭ݫo H*Oی)xsnk*̩j(_kuR5_eUpT}p:q A$&JFk0IϬ١3'/xg :Β2+K"O>KmоԂB붡Ғ qVsMAx* ̢Ǒ*K҅E:RӨTa~\T<:Tc NpRv] Ƣڕ*dIFk0u!*񸶲A5(-T#O۳uRR;K^_nKެDI.܀0zw3sړeEŋG5ku@,S\ "yPzwt:0k9N5-L)5Ga=Q+h 8VtH&+('grZ)dBvȜ&]]E8U8 &XCvxT! 
Sl1~T9 yǮ̳[Nn Oc7kS./W5x/d lq>?XZ; ]֖Rx{P-l]n g/ "//9|forA5%o˶i1TbLq0Mh)m֋ $E+B,1 <@[4=a1.I*RЮ{/~C?¼` ccEװI\h˷t"pbt֣D]'Nu$(Q`,\9Q0ԈŖ6|=yXLƱFd@˄`F,neFq}ɧ'/xȂeɌi2` BɗSZ( nfPM L#"P'ml[(f몈N$ZdȄZXzT$VpH(؆ObpXp| pJrKFmEKEL Y9|!(A@Zy.=yE(hzab6ֱMb%bC$.R.(EѸ B{FeT9j`=V/",Vo\BK oE3s6|s5~y[W,&`"nL6M'7یrG,{#UNj1f!=G6b، 3i`Hc,an NTiEWSuݟ׳ nc'f;x ,z_?=~%lH `T?}y՜L, f|WfX 3s{ҿ 5WW`Op}_aw3/=}Sq,jpcA fbc<6TL6 rw5[`J6^BaߤUs x *Hj;Z٦h&a',$%\ny)A*I"hyM@OonFȃ,-ő"*#1`;!`I{ J kc!`QJ** !BW?M@EϽ<HB)EoV_iխq˃ @d aW3xbLE b[4LSeO:tF~E:8֞n&,^h)M\fe;+CM.u~R #Y10~b&s}T]"B6сFvxdIMM.k5hz7IVlz Jύ_\Mgnr{h:}/q,צ/jwp coRhTD/pk*Y``XCaR}?Bؕbq$<ȘN1-)PD''԰GU)F/j-yXDSO>bJX /B\ Zj #$r$5VX#VH"wH 'ĨqPV`c ,``1/Epz<V>\ɠGA@ǖH{T[n֑cA$yxBTK8qSnr2pk5rbRo%AXÎTS%ԑrZ1Pb"bzc!.t^HPFZԠZ1\eDK#kH,!ƒLznSQ@NT+F4=jqZ XI3u4,P R2#$ ^ϬTt`.YT)l5F'y9B9^b9:Ռ./ji:m^B3Lxf-˖g߮*(9a-o3yLWL̅\JZ@;lxpigljeR#Ye`ZLGN2yjm͈>[1rv2ؚRَk6ɚUo7룟=s=ͺn_úm 98Ϩ.|U4v={V(IO?y4y3?tK#$j [D݆psqBVŨ=Eł:0]}U\,3\.oE·iS5ObT"[ş!@KXn<ՒLKs)dj:S=UD\T崦2j&Iq@0?2Ɲ $M851;:L*$Jk^Ձ@3\Q^NlHhX._j@' aF}7NPg(cQFɴ\y 6/ r aaV/PP+IyfrʓG p;$DPaAzJT'\f<0mV+m4 0W/(_ ;egB/lP>2UsF{: 4 Q;q۠B\URx1UVsVTBxd.~﫦PE2exqDB|(YfpkFTˮEA`o485\Rr o/Qh]K^|SZ;gF0l*`eT#"K-=l@jxOWS{HK).V(q\fꃗ 8*Bw=6h?tݱ3v=DX38 sKQ뎤`1:0 (W˼ݜ iFsjL)=8+fPN7_}xU)x3QrD`XSmre֌֨.z[%`O ]z'Z-1*q ޗt*!_ak7왛VոHϒ; VMI泲R>/S}I |R$%YjzbT_S/>ą6u*ǥI'U`^ܠ̓ܠRJJ 㡓R)r o/ L(+<<销35'=Oia 0D4CTYa,uk WvJV0tDvmdnL^}owNZغfa2ھ./k.7:<|lA|QBOtrÇyzJL0Kw>߶'IF.5`z_$QVN^|ܹ@/޻O&ڨsQߘw2kΙS0O2zƼkwήOM5Nċl|E&߲O.?ؖ;^[Cׇu?M$RCakpi!i.Y,)=@4C8bJ[,zLF sQpt=v8tLyRùU hTN@5 MN_!4JPB\9 h7t-١E[/유lL Owf: JDHHRn=ԜPa"n(Sr} wqFr=; ӮwY>%XIXΌ 3zz4ocO>ld]X,O! `i*%:KJ-.X\Y?b:o12I;NC3#&E)l)%f"U ""(Y v0G2_X/Y*ʤOQ??3+䂽ӻ,~Z@'=ZcDH9\%Fx)FQl 0P1F~*>7TL(4Y15eZ?4ORzn3/p:5 jf8eU]OҰU9 Q͕,wW6AEQOɊ:nGUǛ LU9 NVzTu11sBHb= *MtnW 2Ugm.ZkNuCoSEޭJ7ƹ%y3@PBf!K[%v:6ק)S0M[jIM/n/47OjjW?r#{vAGxXť2,}Jm}xZo 9:ۻҕ?xfEol%z_UXכ)k\LՈ_%ɳ=ɨ㟟 3x*V@__V&$Ũf,;7^6u8n w뫃vp@!λo݆Dgq.b&X,6 0pxk)o8R,NXقjM̂5o[y֌$Eq1 Y|6;W#g{O4NjxidaHҸ9`~dC:5.6>,?7{Wz6fBR8B_wßSXz}w{oPQ?{_Ӥ'~r(z<@;jP!M_ Ea[ h&ASՔ[^Aք gUP?XU2!}% i ƐQ6"פiKMإMRFlP%~n q}~\BXZ#[/ڍEâxMl-gޗ^Q#R!3?1Qtdg>B upztiV%0 oUbo׍U*}~g.+IA`aO2(uX)dMNpv&G C|٘9꜑z{>Gmr9^'ROskXsFgB9K/ezZLOz8V!*: /'k  tvWLrlSH՜Lkq'&ӓ%p:C9ڱ' S#ؿH<.qI;Q5ho ^cTj>x;ITN{̕@Irx?tMv)Ys4V\ܿsAV^z NKWa0܂P@""ܬ(hMS.#ۯa|]H0e-[;YKvn#cY;pnSǘұe+PSSL89LԜFǖ',cZ7&C*IaPN0ʼn/K uo;鸙 K{9׀-r =)hiSreO/?c I GA 3 ΕꙒ~!7kk7Fť*sg){mY6I9m5 pm=HBu KDdzX|LQ3 λA4%~9=7N|gn̼Ш&,(.!e2O+RFԩY /;7@Dwx 8CM] Bq|p␿_W+XHţvO].;Ow?E Ҍ? 4[^=]~…>Ki7[ .S˥ReJymXI祴 7FUŪ*u%A],QlA„DmHJPN9ʐ.D*peIrWFh ji5 btZ)@2&wVGEĘ*Y$KB@fիC&oZyKEȂQ@f4!p?|Q(iYycEJ3xXV/TW[+4DQ4̑&M[jf%zV"ӬȓdI}Ӗ+=k+*JT7HOiI}/u:ʕRi⴮/k?&;JJJUfIaVJ_-fD+  Jw;w y\W}4{*[òLYazs1tm>r 3)DfcqBltpwk$T`BQzV (%QqLs\Dڹ )$<MzV) #6ίG?(\je}l畇k\ӟ=,CQ9#8^ӤfNϚjbl]wkiԮZگ=k_GVϵL8{Mb99]+WzMe1i(ș9xnQl 0[R nsSǂy !H }ZÑc9<PC:B¡K2U(a'xۄK'\N^wZNi3%L{7rK&) zhuZ)J?nZRkOf_ן۶.ats/!W6J>\a!kln'qV{o޷sUkvEo)[d(2 &FM$,V9p-oU ХAvgcB\L X65Vtc6ݵhHM*/+NڲV>/L$+! 
= aت#!yluҌwMy~1 gYzfƱ:cf"{aVq]߽Ҝ=NE:2૴.9Ѩ_WrJvm5n*Jut0;B~esWд6-9dE^{E.s,wF~W]5򻮑o_?_|\PDjGG_ dbYE\.{9ް}1Cg+t{w>oĽH?$5G݆i 1 dKټ )ڔ9f_Py2b$2Tػ!ST4H=Ua6׸IqO!&V?(jլ=6dlP5P-+/@D"p d7ZDU FCJBk54lD_[thuXZpnu7 @;@8 +1pJn\Q:HX sYΰ\!G(f75 PiIFv!̼/K NzJ@BT1>3b'rv9G3Y** hg.nR6M\f: d\znd (TV41VÒ+hi%X$es\oɇƐ_sܨwK-T{nF"J~8W6ݹ~ozC7h+W}~vi+R7@Wgzg}Yh=g{xaota-'^~FKxnCBp}G\G<VcIZ۬BBO*?vggm|Ce=Eu?4QDͣ{ذmEȑq킁Z[U x0)VP*Ax|ؖ*k-/E.YSv S!6y 紏`qX B ?tX^RtwrnRB;Q%&ۚ@|(\1xP"GoY(Zy`rz?z_{k 3=à?O}\K`[1j&kN}߾TԪ=STn>BaKE}- u2uKBp h-]l̈́7OnbLA_--+:o9gY5dWByx*VY#%щEtwo>7<`=~Hh=>Cs٧:Wg_tmT8 so1=j| z >KMSn!6h.4E3+4{;"Km5'?ߒ$آꠅr7/3 -z|ȧ:rSN~ފnNw#F:hqݎwUآ߸inSx+wQ>E3F$Xo nǬ"|m6rۧpWWW>$K*B34PQ7?Z*$VhTX:8:c66-(%" xfI aZw$AO’ :P(ғ6Wjs ub"}_rY=5p}>u~/3,W \|^pgb6b\:4%i6ʁך툅|P~E41H~)nY[5Q-fucTA1F3DNXr8'c>]!i=!+9 ˻+g 3B%X*Q. T\̒wU5є!Pb]M䩸], F /m<ksUmzZPfB8=GVn%48=nԻȡQUbcUdk)f 9D8!,8pB:GzF=Ac`)9( #`K0A6p@ \ySj QɪQb2ik ِuA>Wl,v>tsN~Y](dO\mB_5L^B5u0LJmwK=t/<Oszc4P-vڵukN*T{ c?>C\$Unuǜ% vXHhoni K|,I=K٭[IYT u)NcDϊAf?#Sa'WCС*ɱgM7a9Bȡ@uW$J6h b$!Uujdm7l;mg7{$\E% d\Rڀfo@ )K@10L ,o+ TV* Ȣ1,h!..RΉ0L A2yɂQ0E !%CAԬdg9 iiMNR)jSj(ue#ہ~codd ӟ7iYչj 5ϣ.>KAÇ-!*k5N.7zp{#v<{LR5 8X%AنD`IbX8eBDab걬 =Yˠf':=* ,Q/ ,|Vw%$6 =׍) Aw0yuQ.r$^oc~),g#0%^] R>j$CaiQ&ie.@|&&x:kQdndA]`XǞ'Ruggۻ?-$ݳbېQ73$P׋&{R  ׫}j8|ӻ weM&څB>j2N?36k&N 5 fz[F 08eeG"{o`XoqiM4HNHoǧ+=v>@^nN?`%ha[@XK:Y8%tp>ͩ{ћxt%:ǒc5icf!Y'K"$#Q'#QWzM\I#:zAK7L`8FksӍ\JH1H "sAbH`])]|Vry>F!T^d] kOR[C@!:'P륔FM,^ng&:TB[)F4H2T\(͂D R6A8DxO }} =m,6 YJGp ,AW?'#%Dc"ZiHk ht4i kIIgW.*\hclU_*A`} jDbiBj Aų*tQySjmI4d;yb)9JwԻȯtIϡzԠH8ч'85³ @J##KE}p G}e8m+TB%13b =hG<Kh܏%TZWKŎ 3 㵰f/f6Cmbkl\)A{vZ0pajw sCST3pzP1/OP*bXHhi[o) *fPO)Mdiyzv+cA N87*f>+*f4̚ 펇Z}Pw `@%F%2p|#sV6 a'm;uH:a6_j[CUF#&w&>Zh6iyt І][sF+,do~qRu6'z[*`f`$I忟"AR%@wgT7ƙ@ƙFƺr20δ86X1L0rHؤPHv@V,_+Uc.h4<$b/\*$6D mH͙ 5/+$WVI\N8&Mf̥3"IDZ/5g|ۛ` 5f8610"ø‰D' (`"*aMeCU@j <ƃƃX^6YRuS*b[$|h扛N4mn |`p_fY̚7XPTmjVC$U,~,,#~[+wBa*GOwTh4uIOҒαUZS  |OS<"?̻8l?]yWY>^e%k/xnw>|3}kl~/0#~O~X=lm׻{o:B|xmoEv%Y^U3Ay ˲5^ ENr]RB0W'bbLbZI8Lb`G9Rߔ0 ^z^Eb fCRߔ#e{)Qa^J[( /=$ͶԠz-yKҰ:\#2xK/Ky`^}ǔRfR D uY,JaofP2RB¼+QZK Lj-4ls_K /"K35*[^2Lg+QKI}SZa½0/r PJŒbi "[G#.I8ڲ;pX%HYe/]3 CB6Iœ(U,DhiB)@vXC3>n6qYTy}edddfT.dˣM[ɦ\bt50w}4ٮ3/l~x6o=xuG!CUٝɍ}>f3s 3o~>[Y~ &0K%bO\yZ0,/k9i͋gڶ݃ŧmZp&tl.`6HXr 8%6J `]#nߋmWOtLP-69}.xrز$}f=&Egs!<8Q{Ӟ m/j?995+>'IU؏*ǟnΉS[TS/y'ZG~Ye\1 ҥR'rL$D)"(ylĮJY16ɳU 6%ͼ˭}JHw:ݧ_L*ӏx&>~4M"PzoE֥dˑ`u}% ka4On٦jv:KjqhZv[zS9!j/y9/bWѬݎ'ĩ¶ODu/>tNy7qrc؞gT!/R~_p| 5䍝!I:yWqkBQWӐUBj~uL?~f 1$K@+5+!ЬweC!-1bBeȆi81 UL.ˬd؈ wka|U7/_7 2yMƋɝq-. ?'Gz;mQWw0)80ѱژDpIbDAPbhjhl8(:U"N,\GI5{}g@B0݃nAFyC ǹ]貧K̞9Z~vAwKA}1WTʙ}z9$~(a~jbnj*JZK}Lj@tDJ*=nb>VTcGM"HaJZX'DR ,D}~Eq.o.wm%|чWnX@y ˣc&hu5ċw+®/k|Wx|x{yvcIỚ>"%yONaR&c~{>)Y/ƛCRߔ֜ җ=$#jHKRTjl/eC9~y)ie{)RNǂ+Y8sPJOlϝo*SR^x"Epꫠ@򄅆0L?=VCP^]> @R> '>:dFk>!0`oLJ% y`). s  (n= jn.ƃrLפѵ歵]*g li3CO }49ݵVUb6T-lbJ^SkK>0'uRK%#֑|ÿ/u=q \}qz6ǫW彪MN"dCeob#xl`nomm7vn6.ܲeے]OJu~Qѭ B\3D^)-p6tݚHF6ME'rGтW.  
G5)k@fx[0ZmfFÃgvp.k-.IwDћw"^,>=gŁgR9$pvܐlB):'(u8R>IJ*H t l*P2 U.!QAbpT,Ns%*Fo|eD=y~YϿ|Iq(Hgة?nOї_RHuye*dzv4hQ,#hLuQb|>_~6D1֑VDD,a<"N#p0EOGp*i;&IuE$Qs` J7~iup5 ->!W G*=.wW<aG%Յma$:#f"`k0 =9hNZQ%|diC8ݠܩ%hIwE@1a6uW+VDWH79<˖Yl5.ldL*8`oHZT* IhGI*82bODXD$8r2G;z0Ӈ>u8@jZmyrlGXe%EE9spˢDDH=q#5cp(2X04\h_3V5t#쪗dsKq_jr g^0J{w&E3NPE.q(w+NxhXuayc6.xl/yw>Ot-A:<̆޼`ysUHؓp~6L9śX⧂盎 9[((dGTHq5U`IDsHU'ЊK+#:Tho">ۿ?pwOF, @Fg0ڗT#{~J߬H7^W䏫h@%eg_bCWChbe{3ٿMez|/>hb%(Seݍ\qGm7†PgFht H]h c歒{5@zq=rxy} H!ULJHUbVbPI؁hK&JBWt~s?>6ZJp %r3RlP/ 4ODG_˛N`Bė[+^qjlnnh0%K0TҴTkMMEo[ռmfF;iVZz;:4ڠrRi@ oCKӥ<U4]ޮu677+'I{^?[d.CVz[oN\6”6ZmJ'jSզLoD=WbGK;/_.Js$W?I{_l%9hbK=Yym2imY&Se')QXk6%z9:`WF B9N x~tN26H~ 0:q 7l{!v'1iFpy,C3CBQxE=ruPSYX'`Dpӣ(t&Q 1]|x63YsؒD9JjZ04ʡuHV)SV2R[ն64 Z=Ny8 Ho Iy?!D2ͭSHO}%ei҈՜kQR1>+NEppdnZ]Խ+,&جx-gy~RQ-SDζ0,~~ct>nf pM}0NLA3]~׮mգo޾>ߘOM&7ǝ6W~O}T!ݏw9}'oz3 )V\͗u|(4)AR(=,mm8oaT 2#*뷲!L)[){Vߴ=EB/ߓ@U!Eƕ:z}JA`I5 R[HDG J h{OS~a(#ukxGA4B jԖ+O}Q0P"{J '3> IAw*} ۷' r]S-O2љr$2>nZV42IhT ,f >.W(&/S|ϑ)sQ7/GT87A"|<5yzKJDmI@I6f!"Xɩ%I`< d S8x 'N($=C`] .V,oA!'eumzs='L:s"h.ǟ6^i3R/J̳W$ $sY&LԠV{+TW csaTSyވyno 0z$_?Z 娻?K:Z'gT^6pN$5IR a.`8iǠA#<~P/9 VpQry:@60qt&1LN}B'G/`L)U3lRQ(wX! )b RSmpNq-@ha7JLAjJ8ȚYB pypd]KDK\XnPK4h #3STJckJ,|@zǥ` y=i$N_CBx~mSl&uB}v)a #(*h}_x0 Cدp#Un'[O*%2د_#fc/Jw͆xA{mf&c_}Z7wx 'cWQMXKW'xSOaqm+~n ?L+=푆/Y݄^~Y~:LWo߆$J+osɫ[~^>O?Ѝ[_#mխ[ۻNf1ovSa|u@?岧y< DQiKڪr)'Aó G߼57c@$bm)msӘl7ɇ`g?|bg:*sO#|]v-mkwonF/6@=JJ}PJX[! -.O]͚, uY<Gޭ۰jdV0B06ڀG=S;ZĢwYPdEmuOSI@eusk41nV}0 Kƣ=9++X /,>`Y%vA4۶$!{XD,Ԓ &M?4..R9?v9)|?mAz8#b'$2Ă] 8LɼnFӚugMK*AYC-ΐpeȯV+p!8hNjF9l?4sK. 嗔"^~[<_\Pb^v)Z)~J.4LA!0ej$OT}>>]`y.\MAչ'¦VP#$bNR,i+ْ:I :$98Yl7$Yb[ +_pWy0,)!zxT dL)WxOn'GzE)F] B0|eŬ'I3)AzWh*u2D!ʙuپh9~o7ΙW>iR}0Tt#[k-堇,h$2X*Jermf\Z)d]yB+Džr3]sw\RK6'7TWhV4q{Yñz ;Oe4jrLUH_%x@L\, g7_;|w0I$vy_7VHpy]`|&"K6OEN@XI[; mhHRJ |!h;yY <{ FlFsL֜2VN1/SK*(! )M9i$ Kk+ƀiVO:U=v~bg?=6ג_)0;W "<'zIsjGv_%RY-0ap{Cғ͐L&Ic X b DtKSP rdyiAT sz8\n$r^R TIQ0UE! 1 7ģq,?YǪn׍(O ?1V?XcC7Vowj˽t‘H5D urV;υV{յ{-EqxwwU}?t\YKHnѴ_ Auٿtz젎%bAfFPIa)lME=k 4^wYJк]cxqT+!h`α&up$Ǘ!"_>v/\/|qk;S56BHe:|k#j% SI}V#ӂ-PTk&CA>|Q), ֻ^VDP3܋(aʚ_amw(Kgcbg=zBQg6[y"%)R!6mGm% L M ,Po6%ѝ)SIMX.sЬ4Bc( iF"̈́8쭽W y^N9jShW  dieCzAf Y [IYIn*Q7+\A7b%n%E'*-Z%l_~r?s7Fthy" @ ]4ed^\c<35{o¦^"g@K]!2XʞP7C p2TF.wP4Pjak3T 9;ft%10Ru-!RUa}r|Zwkm5oMhQVZJiʳpU@)i*˔!j8ImtI[¹pk}7pגuYvfJR|+ƴ1PR2ī 'MOti(C[at44ƣRv ʌO7B`-}f>` jh-ϖi@E++GA)Q^և}24>n41T{ EJ8Իrhc#fR㗷V _ƣ7ٍf+( fk㢴X"śt.o$~ݶ]Qo}Rɓ'lt6<\f˛& 'jZnZt߾ nP2_'q 4_ BNwQޏ3,w5&q.(qGtB(U:l]j-)-;8آ;Lս^̚cOKsZ/Y[trǝ۔4y XhݱTh6Qt{w (-iK?UT*B QК;,:HjYjPFˉj+<]Rmj8=|v˧ߪV< |2(&\et(#RD39gog1>\uiQiyJtDSPJs9P'5H)HuO CvQgc:a͒SD~ʭ.0j6hfZUEjVKX\`غ;~{~ֵ}yP.!b9j1muybۉcߧu^:#\b~. (חh,7C]_{7Bub:n=F_i㒤mpn1,7d+麗w Ձ~ǻKbt-|&cS3.&@%:ޮ֜axW=x<8wGQf|5_ͦWvst05qDM0rĈv$T*0`3h5*? UUi9$nZLn႒!4@;Ёޟ2Mhg \=T+ʏکnW ڍ2N`H]z9m&1-$F9=޶#Jz/xњH=WP6݁0arHDnf|53ECB6ݡNF*"{*@$zkx]ż]XRh;!):oE迗iMFia-JiZm;<)598 >!Z2#5{ uŲ)ɟfooᕘ*z͝D.5` ]⥡:kXV_2C@QA9}{ i$EZj=7T#d9 al =Qj.iU7)B"P͌vCwNpTf(=uq#J_a@T}E6 zo)0uŎe{F<Ԥ&YInpߝ&#l\K|oo#ּ#c顝dBbb30[:ΑTgf܏ŝ9s3nښ}cĪ) czjx  vhm8iDnxQ1 jxqKSVZLja%l[_ߑ!t[Kʶߵo)ZSx4f׺TR\!e!sH3q^<Phr9UFr P:w PP׺Џ؇pdա2L**+ZR3B51 JL 1A4I IxѲ4OWYd> {W`*:O(<^63- /OZI{RŘDhI$$/T+5 {? &0>!Vöj*| 899:tuf B aWZH d/D]"WQ'rw`Ec {(6(`"3M!!ڈsΑk~k4%C #~ڡqy^;VMq '\$ #3Bf8+l/5%µ#07@r܏H,͹I(tyE)*3Un*d_!S75]>7Im #(?;H^8NM:'93Tۉ ) Y?) 
jn{,~E0m.;Rˎ-)A7ΩBߴT3Nb.^+ F!p/A.DibwYHB6* ^:BNd>+%^\iѧnK#oT[Um@H)$J1hV"CE2YdQS(ŅII*Q㭽cS~I􇬮7ԹyzTɚ_?:zP_ݼ?!L&%M_ƣj:_,7|\f!NaЈm}&@>Z(f'{)E2J&&LSڨBIt;RCAJ 8!ӓ4I0՟\$ c(Zb ?&Upڣz3H 2Prbc\r!9tLN\?ݸMڍîĵȉ^CyG)ԉXȉ+1$`NhDqiz!DmH}%y,XU>rM4_v?ZoU՞q<8fj‰`*黧%`&.hpF Lc(sba_>/~]ކ%Wkbr=7 !&81mof,oxZϋp\P\~r-D zzɫZ\ƚ#3..}Ye B2pU%VZ3FQW5N]?}8)"  gPd9'{*~r?rB7BaDFf4 d\2uc`^$ #ezf]5ؠxwPH—f")ӂL,08)Fkq gF+3R(A%1[˼}?b.9W2LdZU$͋ʌ-2i)'Hª 4F+z/U*GJ)d’eRUEte8+JYr(6&*#ey1(meQPe\Cwj 1Nv2O7~sK3w@I"8!\> Fj=29 -z"$pc"zT5I/)]\N 0`_?Znq&g~YK{_}I;HI^-&EZf6_?5ǰaUmwj<_ף?Oq۶<]jqSj^Vf2Kozm^ݕl2m?Q'1dy4]{aZX"e=*oME뼌xSnO hT4/IeFM!HtNL,ϓ, WI{߼SX 2.ɥ`yhN vY}_#;i 8֍5Ճpi\`Hm!z\uƷa>DMyLjCԹnQh'Ӧ(e sdEEís2 ?;GoRF$UCjٛA&{ Q;C-.o'GhVhGIwTj-*GJ{fنRZӎ>ëWO~z +kh<dNrʘĵf(|YOȷ] ɷ2 Q"#"8tG;& diG1}ʌ%ɫwa:)L^yA,9gzɕ_1lp᥊,dvI.qoG俇d%%F= ̴dZ-C33'?$KN*v'$κ>|v~2M@=xXmGdztw>}xV}Dxۑ xxe7|]=m~~`̹<~l7>Q m3YQD&}-:rSMGX]O*c_ 12`L}pcp+g:P l]vrc48en&c_@5ysa0|1( vz 1CฯKPHmZ~  :0c@q?`|cˀ'a>0SmϮq%`dv!ybt?j۞yf/ N;}cfOȮg[2Jbn~HE4cq@.Vdb!<* fRh0˒T'')gdLeYfsL%$y#*]3@0STK\XY9O$cTdD9&'HYa,$$RanNw1T.j1$ƏC[©ij?WɳMM̯$ioa~> Pj%37%wmfҿ5LRԁ$6|v?bVOiij*C2{i{3_P-B[4Mכ j*RkS\ȷiZ]ŵalr=[M0Q6)PY ] 81b 8.ӹ!YOtmyJ]S@mAWQ@՜.xԼWwϜj+ AGc+9\s*zGIEF%k=É6=$DAw:竅CV\9*14:Uw*ȈLQFHqɵPx\)1j_6/qƨ}+jkNWlS`]mI.[V\BjMZK6g6ߡvSԧ_#G{Hs!c?LzzwLw }]Du)c6`|U`4 WJ-GhWA[~w`0Nށ+MCHx^iX*0t%oOۧIaQT+Qzw Q$ o-hgs/S;K c]]zͣDZܪu'W~6t䫅 ϨuuIj܈ʱ<_Z-rqC|ęb5{C0oU6rvlχzyxg6no9;_"+U=>}K*j_7g_ fO|KEjwWEЁ]qV+͕/͔~CLA6e%@\*#Q(uΜ;~$n u.WnO*ڿ-6̧-W ;{xHt&KpxܻgCu䢉$enk5Y2}^7ӯ!s_-+y :)I^v1)+F_wzblDҲUM;(52T̈8oD󷐒ʥ$6r$o$Pl)0;{5p`rbۄYP(Ńig#W<t3EHn v#V5n\th;.[]fbk \Hi=PQFJ5-`i)DF+Bu0[Jg"#S.|.4.5V7u޼Ӑ& 54VeHTc@T,jN.6YsdI-eLNc.dEM( ($%/M̔DW#@-bJA#u(J&HEFOA< !0`>tHsа}0>5\rz>'!G%HVRlo{`5!PU|`ȐX{I3;9!*Gv,F$T(+jskk( AmM贇Ԧ94ZAj+86Λ6A2COG"|bBٰKxb< $Z`:gRT5?&G3Dڭ; ii!+TrL̴A7c92LeI!X60aT#T4f5s<pęp)JW-U^dzC}x qe ,8GʜZvwXJDc»c#cxAiE[gLvi2R*}Zb펱w,O%SFX;(wK9i)*`Ɣlm&ҕe~Q>ι 7^4yR=O-sSn2Lobj0|7nRe&7Oib ׋OL&?]9?_7"{|byqXNɃJe7rYϏkkλ>탦3r+ʘȲII8S`$<{ka-n!\#s4+Xi=ȫW4֖xtŏVj0$!H!f\J9ۯhFhwsKl--Ʊ/˱_)I|G?fn5_|͘su/?n܎t6[թvYOw/d~=Ͽ[rfޱy{nCoC/0p}o(,?:SvmǣICF-uݞgNKkԗ* Щ֊r$D)KLd6)%s\`,AMftԍ}- K٣>E f@Si;@Ea)5v|ғR >G[eVOR,]aVzzVJ-sK+-5g-f\OJYirAi*huU`S7dޫ9`;rn_I] R5e^ziZT7[8'OQT?㩉6 GVƨWźkQ*Æ1$Ǩs7fTT.} j>:yt{CׅS vARz푙!btfzG93E+}sC8]rU1-*96EovjQQBVPkբN+m6TU{]өnJR':5OS;٩ ocV qR= Q;ӎBŔ8v+mjӿSZ}<9 -~؋Sܬ/mKm`CŲ*ًMT1MYP)'Q R= .$^qx3vp͆&+}?<'onQՠ 4 Wԕ GROXrb{MtElNن1>'b\„N&.Ҕ%`*,])|rgV('ĸJm}kM}w;,EBP0³GMN1e\y=/k!si#8ulqBG3q+j9T@i 0F hB`ck'n sK\;TD|`q 6-ͯl Kmc`WܜdY9R|$!ynQ.5r.Tt&ΜDLD)j+XR[͢ !B%4yV D :\0[$HD եPFi4=ҹhT  %f@Vڽ2I$E!uo4ܢ$3:yj.L)z:ŵ&<9%\)T(rIɸLg&25%ԚƸ1V,߿OOX Z&Qs@>-Rګx|Q4і(}/{RMI%4-6$ؔ" Rd^: +'mNJwLIΡyyzRrVu |;-ZONЪч9u::ڗ;Co*eZvhupk2c_!q  4F1-"Mn^O`)O@qoRi),$א$Rm5 m&OUˢȒTHe&rɶ0Bij-A RĔH jm7hʸֹ ,T(ʠЩeIjrȭTwTAm63lm2 4Sc a ۭ˥4VrD҆AN*u+l"`1ڦ O5Cm54Yߢ·haWú*L<І!%Tf-j6;k3#Z)N9 V"KJ,y.D<%sN:\f)wd)AFB5t ;!A;KIZ'CAN(CdS p%-]會T]Kj%Ď#ѹ DT:h4hp{@[q5߽Cq?9.6wW4|.Gܚ n1'߆Ђ(9MȕR Tǿ߯އ]C_T ϊ7diPj~zxZ<>-9\1/kV.Wh4TʅhKLRrnRf)rYΒ֣FAhK 7g*K^Q)>_/m\يin!>-H o)5p4|j Hz:O&&us<4;7.H!R.c·%~2|;Ycw-h7PcP[年D T4o4}FE4z:lI; ixsB1z^0L UrZ;WxH1+"ẗ[X 6A8$c@ [A:"*N&SoÄfɖoltl6ѡ@q:\2'qSs# h 6*aۼH9Қ ﷹ0F~:~Yd!`|&Bqť1`t6`(m!: e͙1(K.ṧ[$ }gBa"7Zqr܆ -CjD|7Vk`\|mMR&}Pd iĤLT@)ae{ySr= 4Xm^eL>=r`SxIOE ipqB4W3%QD *5P8CRs,r$?lʪ PdV 㣗"r̈́9'y22HivWuSPx@ZW+siq<}PuTes=Z,:,\L[DZ*ϤKN6 Y"*&0>b༭Ji-Q#(A,-rH^b6'e&IX>fP5#FZcPtDH9Ld-c:W2BIUJ } XN4/9!Dk_*\i:YkI.sx>=cx\wJ'G /M`PR W79M-H-wȜeNe9sSQ,w@4pR28wK]5uҫmߣwRR2 6^PTd%^Pۯd+ }{zmM&mYT:(M:( PF: ڛt>Jt:(&MRi-,mZY\l:Bcm=\$-Qt8=N`92L9,=9~K^C&Ce'>:hKf\n tc޵Is*@l>Co򲦂{蜀15>m 6c ^a8?Taė&ləh<;I#K{ Ű'M2vIy߯6ӳpI+7)q6;{Fm.{O\]ʱM>z|fV^l >',hhoߎnv~~L.֛ճjncOa:npamQ]G)m!6xNޥLx:}C{&W \BR 
Uqaa3jĚ1сV:sndίY g)j52DpR_߅s|㙚}5$y06b"mo?76Z&1Lgj1>vfBu=l !POr3h.F|W /zڮo,ioA{F|(-MOdCR}}׃?mկo0kѳNލh>b_(>LyFp=MI( :Ztw~CQ]鱨Ϭ5mW1Rd mKViDraNC@4 k٥D?u)`y%^.X|Y ‘6q&(W.`eV" Q)"# &ƐAd#mR MST6HY9Q`d/>4R\2CU속M:RkcNhG@./)h+΄BȢ֬FZYqN؆*.ԙڸBr kTRL\_aLtQ͢eu2$n4?.i& I-f/P_B[ n85Oʛl]B={wZ'mc

  • pIҤm}?\J5}swV) ֧¼xB봛WoVoCo<]{_mS MƧf.ZR!+b=w>: <inw݂)=!C S Ak-_O:6E5wo\^@ hHN~ǹBޭV%ЩFvSR ̰[@K[y]/o?#췭 o?Dͧs|mԑ޽/&3b_u(E_{u"A{pU䘏1B}EL"o@I}pIk8FtbU\r_o6y.gg|vOoV9ao_̷>Z>xI#Wo礿ooiEkMs>r4WAэ'^]LKO}eXct4L <Ä$S7"SwTFC/ЎDZOY[]T 0%^c5Vk*">Jq8 x;Y2ܓ1F%B(aR y bզ:E J)JD &phb& @[Ng\"!HМ"M1o]õ6BjX oBa貃KpO7%䉄Cr*{(y  1 i^2oErE'D\kAj%5sXZLsQ ot\n6q.7>!Z)ԓ_WJV%ЩFvWѼ[@K[y0sͻe[JS.퀧dӅȡ[@K[yS2N7_,/[m|Y]8x=KIkV[|%jo⛤~&ʺw*,f&RF9pՍ蝐Q-K/w]w;sbCx3ޑ$p7\סU솹,r~Wqn»rvC η|+q ĭ׈[TOYl)zX 6-|^'X`%cC2 ^JwI}&'_z(*C)P-XJJT=IT2&Й>u68f7$}.[JwK}*ZPz( sj( i_z(B_ /%'R(9-ңF2Jw@2RpqBTa%ݞ)KwK}*$N(=jrURt( NwOGҢsq(-jUj '#Je//?IRLv&a"+'Xx_ !ҦhI qY/ܼͮ_`f6Xl 3y9 EM;_z`!%-ڒEC 0n"EX"@&)L!)D3cc "0iu7a{n$y@cqGD2TXTlZ( TDpXp Ә:M(R (AY:rѹ2s/zʅ= gݻ/`W*wtz.sWPI"Hk>[J'(`6?3}q6I1m,es2+Gh1LJMWV٣V٣J=|,{+}y-TЉ,RU娐C%xGӍsz>2Ufk x&7ade6=,fh}Y/f1=h< $- o>48OV #{quaF$ԯޣU!Wue}OAё6:M{B!?<. P:%jt_GR\wEbvMŽ0|s ;M*SW:B !:h+޽5xV:_ DX)Qp`rP#ڡV-M VRFp;DL#$(JV%Z2-fJ/ Dp 9U\c|(X}χǏYj q }R^2"E4,>%୸%P"%pB2xW<% pBK'^))xlp6J %>Y[npxK 9D{q9x9Ǭb,=MBMGjlMyoC:K֜z=y./Wjuvtgf-WeLT-GȌjWrTq,|sJ)Z'$V*4Ļ+/fџ30e~(×O}j/;NS8̆;MF+/BZl PYF%$]4kHUHWy/p!2LYd%&Sf@Ͱ M.yBgyU2*nѽ%pS{}5<N/僳Jx&F`ijy)QNN]WٖssjZqAsS6HgRm_J~ lzQJQv2uE &lh^;.܌_;Ya]3 Og8OtaNq׭<+A0 DF E`M۾nM;[2u5N]ZBq8 /q Ch`vA wB b1v72 MPS[p6- $Od+03Oé';H$Ԯ8<~*UW%=yPE`\g&iP:wʊqBS֝LBPENPPQ֝Wze(JӁH!%ms%b|#SH d@CK)i,4LZ{M:3,Ǟ vyX=B5C5Zu]%EaݴSBTv2Dt2]~³)A%KK +klDp))S1ȵE) R.\CВXHbtEXbRf7ҔX Lc ւ2b$'(i,9FmLrzeDu?Y.$3g{2jks/ﻅ&0w/Q& y3}6ky>Kbʇhdš8c%v\Ўw'_pp}vI;h,vߛ`؄J. F~Z臟˿tJ!DX t {q^0͒ekh4L~#-hwI/"k#$+p@?g=Ua-Jv`R@|gp;!N'ЁnU!&ۗMɢ8yvQ8B^.lQ~s1J$"EJ@m0[9faS ̄eX=|Vu^>;~$m0ͪte9VRiÜ",C*ُ "fF%ϭpI`, .rkKZ3 ,x[- mWpj&]l9ŨٻݛG{`y0ݻ9;ipoo>{_U%G l9"4g>ǣ~qw5:רxeo#08ԎNʼ;=ŅK.vۻO/6Ղeyopf~oKp-]-)vm. H]vn>Zp}s6ku]AuX؊mRZR:V寄ύUlHyE~Zcm^֟r~>,Ya\FwL u'ӔիvAb{swhF$y2Dl~QrxN(榜 {dQQMC0I#:5&VRX$C"||9L7A6V^(/93*ɬʝ(2M CET  BXńPd(.A8 )aZ}VYu0e%M&[KvG)|U2D·E!RI΄JhJq.ųj ̖ܯVY)m4c],Veghb7Uc h@ SHc 4F 8DbdHrU^ݕ췅=a3hu8my|D9[w7w)}BH?Ϭ-f7[LC=2o;\\rԁe;8 ؟,Bϰ(+$Iq M v"B0k Xuʑ!bSL $F{`+Ev͒(k1@V= `#ZگjwSII,9`v .)F&R2'Dk"1]U DJ/Tme}MmVao(,$vREho}4Fq):8 Զ|rcUOU8kTVhc F)"[e3{o>Ar+?UNm(%FE^pai#" Zz/Fٴ7ἕs|)BÞ9l:1W1ʹW1J=\:F /11 KQݏGi}[zw_ȱPc=~g}8cYMuzqw rz8K*J*l qR1wZ@$s)r3 zM{#7걎4ΨkޣnhMӍDeF+8gU&zQ(@%j33n}ƍg@ vQE+˴#xJ(I(t00aP H   *7E~AAa0i6#,L258&Ct*2Jo)Ik#fL@8{F粷MP@c佐`(q@ArYV$ Y+S.Sbk*S\(0ո,k䅵Qu%&WsX _Z>~,fϯa;~aV(쪧b:B? kf/[\6)W*^S+8GI^dOw*|><~F]r 恼%r!w"CP@@!`(/OTSr --OVc,E}jFj tR#;[`l@ F8iz;/J#\aU M}&oznRDL"r*㰪kr]!Oxz}ytۺbVm]TgO JyP [30Q]5܂YW*bIVKGUd`d ; ;?΍ԙF('w(l/ ( b%*µR9SBΗ:Q9&EpDF5u|1t:Aj?ܙŧ˙1(떉Ay&[L@,5Cu1u+sɐGW`J5efuJ{m1BPv1B mCi>s((2Y$K]cP#QX$81H!tJӽ"8\&@ipR„INkDjh5jyb$k]P%ݥrzb~1S.>r䮱?{ܶfR~NWMή'oXP$͇U_D*%Qࠧ====|e`u *߇͌7o~6V#w?wi6dy׹?DD7Zgf|_M'Hlɛyv#֑1cl{0i&XN0-n}d< X)Ĕ+aтݿ·CNlx 1Gzg<*;p߿vt}c* "6zEz!Yh95}=< *ҖI1T0a:csu.W}X`=Y|z|b|u'3M~6D_)ս 3 ^I?xl>] ={eHOn4J]{0$P/bo^A&Wv#jΒ\ms{m b5X i"r>g^h|_ɠZEkxE|PQE%|[^/8#I6 Vh,$3V @S"F,̇35,32gk|e林qUFPҋ'\c8BrY%/mIcAYQhLR#Y3MY Y]ufb0P@?_Xm>BK"f+r&96^~槬g0Ο 5M]ZbwO&!#l-,K[\ Q+:_Gٺ6¹L`LA9,~f2_Օ}d~_34o&juhei4]ʝ%`g GxTX"}AVkObdA~EZ NW]dJWJ5a+ ѝFΉǺo8>UFx,Wen 6D"R(bM D#hdd)a1Imq,f;w[Ts$('2Փi- 2:1}8[uSn-_ N W~n+i5԰ FRuӧe1Z'w! M"&,ƦBX.q2pGOw8W8qyHD(+mLq#uDK9u?St;>XrDr^>s nOS^ʀhRȈb^Jb@$0e|F* B\9NAL3;tBcW KJLH$8T1#,6戣GRa0." (Hy(#8~-.j==ElЉu` 9},9MSiR)GnAK҄&*UL"dq.jG.9MZv?Kr'PhUxo/L? ֩&2ƠP$a 1CCfԞEIx ̩" DhH26F21 Ah{r,~RaGq$ew?U4\bFLM䛟ql('LP'0 q ӠRs74~B]jA>lVTT`' J!468{gp"'^gf90kuܦr=]/p{S~`Ɲ|o`OzU69Gq?!J ѻ,1{v Gk#LlX7!:0fN#>&(~)LR*-(@OL%Kz =̃R,(x_ifYJFȳA`d8,AR$*!! 
B,1iD"VYV\& pQQ,-ո\6QT İ]LG.RM=*Uu0v6sY7J_fBy-d4V% DK?3i9 jQX0\No7[a`Dm @5dqYh15ә?ȎusٱN8dT\QѪ>}0T1Ri&v8mq6 *G@;3zЄ`b|ơ=DHJ.Z[|#{ 6jبymS6 DuD_4N`_UYDr:ԠV͝r@ E"KQo~%f^+4Ѹj׽/WKaJ~&RJ0#T*52 DQBFRSpY0x$($q#YQ;+TLW^g@M uӭ:L\ )U@5+(Ќޡ&aw:鏾F(*f&/BT766s{}tNܻBY({xkw=gDML޷Y0 aκ՜: nԹ89!VlD+#v[v{sB;a|oofO(d48cUNIFD [~q)lZ$RPCp I.˒+"4q=|^ʔP.fA3FJ~);N-}Xr%r;E{ <%l[?'lU>WtXSXEEUY%n p,AG`:xA*IM^\!W'pOde5]=Pr Pïb/VXbLX֪.z3PD8CyJՇЇ?sQXiK w BKb3>b:M5~z LI>PEBtNX:d$(e݈ MPqKsz]2mBLxoNx3K!`Nɩb;Vg՚ꪼ.۞Jﷹ(6WBq&IBKe,)M# 2#IH#DC(p)).NM;ylQ)(91RKƑ2#hbT%I(*$E<2ćy6j糅?YG[{^ xe=d_÷3rkc,4{ wvP*7]K "H=w7 O2oAiw"'Ha-Z;x^3)?][oȱ+^rr}93Z nf0< >+ۊ$o%J&lRkL5뫮OG`nCs=>%"8RJLxQGht=B Oͫk8ǣu/-mTR#5%+{2D~p.Ǡ{ks)ibsd[C<]y~j>baʡ²bʥz#;B"W5B}ݖ\zo 2ΘKM/=q> %I/&4jL1ͯE4 ApT3J#Ug\߮G3CVz!fC݆w}/?ۚDz`.6Nl]S0D,℣"Ɣ,Hq, C&4$*V$GA,pR*6vٱ1wjtwġ6s!jW]cj %~V8r[ݮXb P9 Dg\c&id D8M$"fHUBRX=>~˜P"sqG 4>$N2@"c(_At 1qOF-՝ ? l'/MG+oI7/j,K3I5-vS $OT(`C aTᅩKF@i{WGp'´5/@n~-'OWWssяY?oȁ-cF Hۄۻ}O5#a ܚ;:륚9nM6zZ[p>Wqd'6dv3Ն,*ݫEn{|/Zu&&v%rO{G\cn,J{6fj"ރ缣x(L7(5VS9z[aP Q ) ugAE=`raC{NBœZ ;H ݲaD M|F UwҼm֑Wb$+4O>{y!LgZsR{/9&ԪB "HI96;# &gr\Dy Z ti]ƹ'>GO85ٛ拳~P;kxhז8ME 9؎&y?vO~ɳC =:LXM(C,7Q'R3+力z7Wؘ;x/qͻYz&,7Q*VCkڻ{wsՁ鸾wrt!m~һ5a!߸)$ybMďG>9YoAu*fI#ˁs']+Ww5:AU`98B $ٽsD.]]]DJV{ET(g%@:ݣ1RtM_HGdW?qJKϣj* }gd+nlEW~U)4kN8b zݛG*ggG֊SǵX"C!i 5DK +4hk Hpr|9`vfY})BN"_i9Y(j.Sͤa9?8ZqZOQoX9ABM;auv]۰Zax3y"L$@X-LNYȂ1gR X / @pɿK=VkNG-9W]caH"8!*NK CHQ4b3aY0}u6oJaClx ch,$DX7FPy 3BVbgnM%0_4NUExR$[p\+5*_blgWҴp&ѥzv 5FEJ/JR-9gR"4Z+h+J5P/J;+ͥօ M^rbgEBR$9ra9mc]'mJ!zUδ)H3Y}9׺uj>AYoH^Ml^'YnPB)_WZaSᖹBj##hyΏ\iyW.F#<_`O2T};ħ.̢@(N[ݹfYpBX$/dXOhj& o?V4M0U,PKIZ$^.I )4 }@bFDbI4 D14R`(Fb6-aºF0DIYT7X&"Mx'nDca^w| &B_ k< $%ʑ5 R%աi dж YBIcGBߓa`JƬshC dJ nmBM)LN4Q !q l[$Z48)D 5̪~"Q,r#k ,a Q =R!"8H TbSSH4y̸߈sFC(4BQs,rSx6|zXz|~dc9{ՍTf04w.hBfy}Y1əPv)yu%$v2ꕺl溿sq^N$wXWfYk}`! ]Y lu;E=ќPiUn# `,qdDMT%@!$?my\7T^1p <`Q N;πp~S+Hj׍|a~!խ}V&V|㘊^D_OE_pW? jY qr .E1ҫm-W2-lc@-Cly4lSEW!k!T](/ éOpV<0qv ,.^8ɂw8 ~P" ym݂1ζ.bKEx8l\VeߛN+jzaHoSMe*d;aRiSlTe.0wKagC2x˰i0u1M ܄+Y˟pl8.#V+H#:;@T7̟n3F 9 s#l=yeMY*Y _Ն)kOKs{.NF7FG9-u[zAVvvDın ׁB*dk&30-U\q?Mj뒩=y=pYҢ[)>}uS|5͆iAsohz#z^BA 17¿aHt#hO JnDBq[:=!0#e6낣aMq 4-}ᚕ+g^,d=?R|>=ȟhvaӃ\  Dl uFpihqVK#u4R=:v+ʽvE ywm_|? 
E^hѢ9mu-9EZCI(Yje;Z/Ap"!Z!g $c U2%*@X>>Z2 Y4!&-* GN *ϽrWax'T8lJ BXj;ruKjC:ѣ$IDL5HlDI?G$:&b--}@yJ+瘦 I@ Q&GVf_!a sf y^񠉢0NUtsn@,^ v g2ՅsHaA )B^3l~  /_k- xӠ[4@&< \r`"YNo*,2)%$I̤gR97t N-]OFL(5Zlf3T ,5˕er:6Vww1t'3陉R?ILW.E2n؝j [mS8U?55aJ$>s_~NIqZ)9*q]mkxJ(lW+[djMH+eJX_VYPǓ ]kqva"@^&mşDp}A8=U]) 5n3_| דhÒ2 0WFS یT}IyF9%˦xr57)-T I3iNJ2]Ec>@y84d8ol tlyii.@ hWbPX nU Pb;V΀7$ Jm[DӢ3 LjbegG&*?[c K;1*Hd+32Ɔ; ݜA+ĒXw9Ŗs .U"|Ltk&|6w'd>Ifݒ:]2yJjxyf|'(R;R17X >I浧k㰰&fІR-RVGwRk,{rzoG|ǵ2i9m<-??7E?> 3J#g5DjaŻ_@N-<"95Bg˿ dΫ///?niu35=z/:FLi[= .x]luK`uVx)5P!R@iu4LܧSDrטJ;?S@BNq=8:%&g+a ]Nx(?<)L;d//\X|;B w!|`9<(ZΣr]܀]Y=0/g +N\@ bǥ ^ &T!#oq>xMbz bDtBClI1x1/K\d 0G&e* c4V囡P$nMPn%.U P6ऱ YdYml'K 3Gc!0u PLq̕_A`z>H1p_ Ғ莫]9&$aBqmmQ'88hu-;27RTǘo>6YmP-.۽;GC-#w\r('ewJjhKDU; hJ;0#".3Łp%5a:b`]XQC*ì<(b %IqC:mǀAmwf{c=x8;@t73U Q$lǏ< rLb U I t8iۮ#3#Mhx9RfMƒ )-5 7Xq3N3c@~w+˺啻3ţ@~ %?_?c7~ ~$o7r%٪J/_db>[!%irÇ?01H:cD Hptl@C@<@e̩9/'5w,=dѧhcENX,'R4@`nOSR^~T&` #/P;Pp=IȒMeS;BW[':}) W Euwݫğ'rp'rޑ{qZ3F{Խ* ' -Gm2!~lH3֙WAaV1o91d%CKRH nw$=^ B=RY(oT})L+"yH1~pu;2wy^_@1Nmcmm7p.mpYok 7Vp)ftꮡ6svCsEH z8挌j(B81ӝ/d쁣Yf]wNc-RkpKhY ZvjZׂk].+ wҙ]LIchskGlvBrw~1V1!ei4Up(ò%& -aN-ZPZvML"qѓl9lίy$$ >[LM35}Vx,(ILMTw_U>O[1 ].'[fr=鬇{Qp;kjm5%=v_Iӝf2Oz(hrο^]N;1$R[ D̻ݾ#`"FIxTkoHQXB*8 WsFL&^0è@wִU4K+DV/D+qku/fr)|`"E1U*aMj TaS5> AaF%% p%øǢ*/mehJ{EE*# r1]Ekb2f4CM`Y,bȾtm__[|(Wlc~ZLK?+.ߋӯjlf",xq_o¬IM$-((t ;/WZ%9`B NS7p=" 0Ղ|CE!c5 LJv}DPS H*=6 :q$飺G:chDzƖEhϛ&fBb0/)(gxdt}Z5+Ft|zoLQԌ% E'`N2DIo7u2.TͣET?cr C_{4?Lӊፍᒶ%1vteUL3Jq) w V-\gZ %@*gabjwcgm}vP iE| H^Ց,Wpbޥ⩯tv5c ]"㕜}v{ٮ>JF ]T$@jk1@ jU$6v)Axa:ZKWc{fR|ECj04s+7Khҕ+ e u5䠿eCaXmd }GZ­ k'GlqU k߂ oa?j~13eg3@{k`Ξm'ID?/R ,xuR)x|`+JY_~9ڿxQNWHʆ!}s76#MP;HksH7} Ń3y/ن"Jzxa,nW4g2U{s[_&`SKǜeR,9JܲWC`.$ъ( Ϥ#YR͋aDa ;0Ft.k,`,o A4i_yӅr//\X|;a< Hd'C E/|QG>xi T/B^fWH12AClՒA8SOy9lRkP:`~q!v5./xN>KxE5j*tU(QTգGN%WaH J)XF pb 5RWDieeZôْ[:P2+£ٶ?ʉ7_s;4:nd?t ifgswryaOCD9y|죂kx,|^&&OMO>HSyCc<:뻘4о,\tcJז`CHZ5q">4r9ãftpEqxoϭ=gs\< n][""Nk@ȼ)),%GTXc^ԇDzBYb<ŞUQbX"wYE9#.LC),ֱdJ`]YF+~A}tD?xc¶rtB7Wl MŠ ~Au7LgX4~gh;3"`ZSxw)CXlCW8y|uh\`ӭ@TA x' ^QQukREe㾇*X#p!OV&D%" aɡQ#6Pa43ZZl0{(Xt-6Slt;}`ϟNF,>]*C&,yȤ؏LA9uz>1{FL)=L&ծv.q"/QJf ^[,, 2{3~|p7y:0`eA>0EJ挵9#Z8f["_*8}i 4E* (@U r]&•K1yvqb`:$/_fC

    S%3-b {t [_ Q2,N6o^Uqٴmh;A>_4*Gj#W 29[2sA&hǓg%z~|ueg읿=(czf,T~4m;~p~?e^f8++pK0-hܪCirnխsq?8BV2[꫐׵mڟSdkmF"y;\G8N$_YXN|T IXepZq 4JZ|[}Yt[_o0$5R9yʬ@5f -)B' W8jE}%Uct#_q~z5ǹr$T+WA^v^nS-p:@#8ʮ_2?;= ЪFZ0y.i?nh<W-:]O1X7 Hr7kxlWMgzlUz `ns--yzO]37x#ҪI%B)M=!~c+#$KY.+B92L"BT& 4W1OCVRyru} UܺE,߭K$hf]">kQUod@p&Q>\~v:͂H|%މ#VR7YSSɚEO+R5lP9|꼀T$M1ͷ "{\~js9IePT_\f@(w&FĪ;EDr!#όs{#cn,M,q%15n(!„HcMFP%64E*]B2ڔLuWr-pa۲U!+:/ Pu70 /Ǐl$  G[ գ'Uq޲~u}pPN:#HRፎ@֢o|(8eܰϓrq~o >oZ~ǡ.1ǠϮ{4O%Q \әF? wtrΠkH;t{0f-a!mu ︪o2ì,89s:}>-V/|E1g +}v 5wtuϠi;*v{ڠC g44`vƓ:!=˸[˨iD)$OϮmS)zEިN"o⢼"͠:BZZ K>};;u'RsޱYvqufomyQWqԶ4>Nwo9.ݥwW `Wlt^nlzZ{l+7i{Ыep]%+]j]VsnD1eBt,D]},.=Kܥsa݂B]:Wi չgdwyf>JZgR֩:whϱNekf-.;kqgfmX|~ͩ޺wZ[i8F6 `l5Qp$kpu(J֌9#EJ7)OmF26B9na0nyEG#DgkTEmF5UՋ*7Yuh>QlJo>B"N%Q*_0*#x+l8OCb)#b$R"TZ@WNay"Eum\ 9uBt;9 NsgyJ^jRdJC5Pg=GCr ys 42)bf%Ѕ^ùzs3613x}(>+aU̜dD{E{Vo HԱ05`Nɳ[>M}fٱA- |`E@ HQ` ,G_yKZ| j8's\ɹEL۶٦mmۑԳ18XWI* j/'9Zb'cV!6]bMg;{ÉzP`BdVRD.9B+\46Z m -aI)V!C#X2p-AaULbD!`rI apANi0gTߛ1^թ ++v'EQ2K%'PZkj^ y4Q,01E&B'T0 ALE՘R!ηGu$Ur.eF$0!!CPXMu_]y^wIkbxz9/>>͞x0c_>s(n9Hْ{nB<Bs&xK\qV+7xQX%9Aکh ̽~uNh2)Nr+De6 'V3u(>2Ya Ӊ8f$LpbT[J Ɗi"06dw#zbHX"B SϷֱ &A>Rv m2F ]s- 6* 46`*ၡ]: fq~YqmEsMxzT?}N ?2co,yg_E0Q ^~\~+d1L>^]e,*DfTt;]?33eãOTy@a}16_pzLQ:xj,(èxvsAO2B@뷿It]/Bo81"?@۔^\Z =ѪVx\ذ+t+!膿wʚ}PUT8x}'oWHш{ew1cv,+̥g^ (ގ$+"M72_O y$:n'kPxڭ8%Uu !_n:WsK)9hRN;r)O[h{5p鄃z{sgpVxnp&@ἤ9+O7fpU5TXR}u$ V&+ Kdlj-ikPbK{[hb\(c8=Sqy׆KJ#G3'W3>_O㏷iM>;P pq WZ WbJub[Wy,' w0(տҭW{q,]01 lžI gNX6ԥJ mSIh2-M:7d#٠0πc,z%m sZvo [&C 8(YAnXT1F?&r:]w]ewgl }桘>z]tx<.G @~PeM{՚+#1ל n':Ě Af m{9eT gU%tp.Akc%̒Z,  ZSbBW;AwSa_4$KhRO~bJX%60% k(45QϙA IU/x9q59m @;~.yi}#$W VDŨUU'$yUaUr5(%@ Q[?8 <7Z!'CUib*c4Wck5e}D9c͐h],~I\Of7@5\_ 7Uƭ$crMړKѷWX8O^&c/9S0>Nw T1ZWgMhl/@` 0PW%LB=X2`GBNf%1+吮q DX m-#)'4X{.͂kr7MMZj$f kSsi>~WgcYͭp|]Sj۠F-@iU#M/K0э~0x ; i\ӟUV('$va,QJ@5 (BSUր H:T@oo@9ہ lhSW͌z+;T2nd~ DJzmsXdo+@F ?^e WL3 *C*P$BREV#%eNҬ;([!_:< =!uXnLz?A$&3):"ʐ̳GyMg yIpǜ\( m C" p#EO| O')E{MzQ, S J7>iq'>7("اq=pkThNzF(Ԗ1@4HmVs,)BZ U;OFhj М͞ Ǣ3sMpliDH5"P6#&ԪR+W <;kL5% $OYQIRDYHƠ824:%(4JpPCkLkX^B@1P. XrK;R\` kgSN0O4j8̨>D`\(pbi͗f@{o7s86fRDҚ/́p&c<ưh(3T#F 0>7:Lz,k{Rn3 5 p#LUTs9 PUJdopFq.Aex}}~MgrݿWuܧyc/CY4 BA?zoȷNz85~NF'*{}뵰o`O[̈jA6}!L&AaJiIIV&l ȯ1uĝU^p8;?NPgU!w"I4I퇔W*ǝn3HgY[p%LЈ>%Em_ul5hHW1xh cB3F1Þ5%w ˔N ap!djZ $S;FHup_nQ-Y/AJew?L .^_%BBr-) ^"H`]ytDf,HÇ\-$ZH.dBzFXٛ𷤇^I3LªTמj?jO.{(9Q1Q̴FaSP쥩m~F,ܤ:e8+Rܗ#JtG؈w_MHBBr-)&o:v+A蔾#G%Fِ[y]Dօ|"Z"S ?ni7AA'n-XvmhBZ i?9(S؆D3vrHφdt8 :lu8SY##s I np"Ȅ uMw</.jocjfcØ6ĿYߙW]PUΌlL6EpE6-ޠ1ԬK{ &@8− *8'*ᬠfzWz2rqǔwwS1)O>$GPN & _HCYUϫ"A=w~^ HHp1{6"8<ƚ+NkߒDjKr99LiuxΦpdžj@(4FƄUg{iq8P+}20&2 S}N5;'~T)P:N QN(+H:,"S͝qِs/VlU푭Fh@PltvH.<_@&fǰ9K<.a"^^uWk2f V`w=5*w=3%8fRbFgM$5sagD1Lo'0iX> ɴ~s),>1/H0PFpٻkPQ68q芥8{2ۇ/i2"k7'Кz4mG/S~ҏ[6m4Y ҊTJro qkwV艇pŞJT1* OJ0@p dOh-R@$P:M -.]ZD5αh˥kmF/}H&Y|ge, v7{F#`ݲj[G$[TwbX#TRwDyK>O&OV BZ !-뗼=Jً쥱'nȠG{x4S0qJq~}Uȫ'PrH"A!!B s)r''0L|/:E-C>fk E7.N8OGP\6,wPȕ,HQ=S|k *ՎYrFMA WeYg*A+ f)s*n[9@Sj^Ogm_9@4'sv{S߰=%t븧F**Z@Zوjz]YՒӖ`wu":7! Q `U`6jW`4drFI~9tk_αVIT-j!!90İJBQB-Ɉ(Ӑ r)FU򄍥8)8Tթ6.cx ܰv@.֩Ʃky(NJZLCbE9JC)~twu y44#@k]vl.+T_Pz~(q(o Si~( {b z0siq(R(,<-h/P " H5N]Q}AY0rS(& H5N]z(_~܈SЕCnTsE.(=o]+so9%JwQ}N5Nݨ˕|R&Pʊ.e2 HGKP G0@)0Tk..Fa(w=@a(-f@.(=gqEaM]jZsD)a(e^+ҋ7wu JaQ}YF4( 2OQ՗eT_ @i G|ڡ )͈c.q3HMbsYor!%y Z@Q2eN*4fЯAAI?nDڨ7MhQi,&Mx|xXNNj__;*b$nbj F+s  j'7[8lk2 Ǐ+~kF/S\ 8]cك ?H`Q5=7#|(cKAX66VAgs ťYwC3N?Mxo6CD}z59^ݼeTyA 7^vƲQǒe9@Jj^pt5ю.E9Qw^F2飓. 
FpbةpzrX:1gjf$3$YnT '\0+ʩ 2%LE֘) mFCDgݾ-RQrN<|&L%bI2ͥ1Z2AS3-,JhE̍sQlTWg>ʄ}g9C{/u3}:MiW3d1}U$<>>)ɔ#8FJA]~ٌXNt: 5zW95'L3%Rp19]P 95|VD(k5buc+ !"<:A".nz &7b8ST-j5/c: _TXST9yW +,^1їGH8r7h~܈jZITzu @.XBD*Se2ʦq 'h_  ěX8S:Tg+z #^}!“y~W_^ K"irTg97YKdyx"D*I9%D/>}Qg/CYr.&kH(%Zdw ɔJ|ui":B*pQgRt/Oȯ4c"Gҋ[^ ݾJ47!#j+۲Vߏxr4/f Vt}ɢl79\8rqp}swda5#Zrf+q6C=a s|pY6t5*7Ѷl9я.'URO<ꦞ ˇ /bQR't=y+4^fl e_TX 嵸/uZӿw";xa@ ?EEe%|Te_wMTvjX@'M//S:n=[!C-ηMk˃ľv;! δ[5fڭ |a*U#]zU򄍕-(?О|~-xp37/䈔]?3J'afQO(i ͣS>H5^69Pr3fess#?bts}Mheuًc3`> G<`y|91&ٯ1LVp!eD MPIĔNr$>D* 56==r)uY@,˂J ߈ɬvy0Sgd>MLb^RS!G&/ig\@Sb % WEjy˴vuyǜ? 5͙?ؔ"M.:OA 2 X TefԠ_]lVS+l~d^jbd9yhg8vCR 1FSSԐkh>l)j^=B=[ V (xR "-WY۵_ ?j|dwuZTp"'? w"fm<~ }o]^2 h|iPT&loŢwmm%[ qi4S5SRdR,\%)N忧A]Lɔ|s(ў# @!8fֱA`V2[.OSNS%39^,-Xr_=MbvoiQ%Y2,Y2 T[!~caW0exVi84oV`>?ٿ60ՊC ׃3GsH~jr1&971~sov# jKNQ<wχǔ[Ě'ߺRdH=>}~9v?9މMOXEE#iRb\K賽8hā?@}߼7781@7H:R^)SOʹ/GŸGu !8i{p5mVXϻb`mvSvm&F7 h5~bɌx'&&&m - Q+5%y},BȖ A4*P;IYS$gC ~_YI9}_SG љ&r{:c}ZgMsXaSyR& tT]Dߑ~VRXDNxu" jʇ@)VTq)eot|˖) >i@t *T"Ҿ$GAzyԛHn$Tzd}+CeeyR^&m"_'tAΚx1l#[[<{r.I\\lG$?\%JJ)5!mـS) RS6>We)?"V:ӤKto9yw&p{!]ц,zxth_?Q>kw _eLZ/?iQH-"OQe^lRwa$lH~w_ڹΰ,RO;sv}oqKϹlWHjK{ @9z{'B|ԍp- 2I~*ׯF(JA;ck,1OAC_x^2"hndQh4":}߱v{;ߋy _MO>݆?)oAZ7n}iEtcvAvQ !!p}$S JUJtaW ~ޭZr՘d^܌w$r}{NrĊ'&,Dzis,NT\ ,1z}w{!Xe/E_dMv .(8뷶Aȧ UV# E@ 5Hal8aο1oe`L e}Ѡah،O?k*o<q|`t$۫ϳUlAI1z'љꢎ.zCtf1B=az`~(X˽Jah1YT 0[`SRpp.W_u̸z'ҊWO9FcDc伸o'[fD\?/P\ը],֫5Vn)L&!' Bĺ$XAVƘ 21owח )B*ق}V^-7E@!9L;_El"mrf؊+[C<_.?V,ddЪe^.K_0RJ74SfSgMT "ye7c)VʚøS})A6i:v浘R 44GI*1_qEo< 8{y$v\ҶzxSsUdQ"X#{ "F"Qch(q9F`"4EerAj)%9{jiHSN%Q}v}ǁmD%t~M5dL@aYգw}U j#Bq_t= >x{,b^-yp_p'Q%QeUqTl%!HVƏ/UvTY!U:O sd_#k*?DYm ø[}i I9YX}]X5cO'٭'*b/IRrEndX+m~Hd8l7Wy$R;=l,+gaH-Ҷ*4g_zjKޓY"rby[8씓p\с|ْS.t4tY m!tb)Eb=t&S-GC p:"z}괄P%\"1c(Α.Z7"tڏF^Y-yCgSF$(f*YD0 h@8KnV}z@7OZMb6 lJb4BĤ/ް Q+lo t#%z.ϋph#0:1^}Ɖ}֗2sؙHE$Euo`Z34R}X Fbx~R\ <-SN)Kgw/5re֑&ȶ+ZRIB0YRbq@:XNFNN䓷HY'2UDYPȪ^T5EP<1usEռ:rd-:%I1K7--k?7D- ,ț"6.oVmMh90W ^E2hvT: o6s@gﯬdvregB[GdEQZe !/,ga>[1Z -QqQPb5s.RYx7'x:mHrG+hY@a@ް@f9kbƑvl7z+ f zƤVOʠM{CNyl{fa@Nz历W`$̤yX7Gk=x\Lr2+%'QfjpE]j¡o u8+0iz.c2o ʞb ›E0[ɼn={jlzON{Scq L[*UTHt2w v $ߡs'FZN|xy9q8'HP å`o buh zܱ˾+O}"R ՠDO*xrlϓ^xY_X勴^4!Y$R zT"r$$Ŕ(&2!wڍcnFRZ: ~Xt>z-c̎:%B6Jlahhu08O7rSZn0g0yxw/}E5rqv~.֋>~CT;@dڤ;<dZvlq/Ot"{S <]ƫvJhdK dD f3#OȾ9zC'1oJӁ-.VC)Xu(Ā<|j6\]21D LXn=x펌a;;Tq )p(R\=N (R+Z .'H YpB GY*P@N5!X zģxo'O 5U,;u>7ڒ:_>ZAi69*ps*:nHڐꇂdZn;HFm/0noOtI'㔰%}@"c / XqIQu  L_Gx,,{ز/6C_wT#R4to?7[CC="eZ+dV;YiTTGhjތuDŽ<ܞsr`Nc#8B}cYE^[%?j//vŬ-$l?v[8e%\Ö}YɿbK0^G=5gTtd"*jlu]|\NjF]\ĥgu{1H"vB4^Fa~r.vºC$][oǒ+=d/XF v|^`W6oE8}D52 _Uu]ktB<ŧ/.8H\OL<'# A'l,Bd  x^44P3sn'~DށW~[坆R1/ˉ/%z*\:"ﰮ6uOcq-P<6Xt)WN刼sb$0޺k7.OFp)Uey&>Mgus?u`RbV Zm Uැ3#Č Z?d؃㉊ŽY^m[, 7*] X"hFl,t>H갺nʆ@e *){b@hkvO r'Y SpK<17 ,.aZ1&9Gp ˉS0Ty֌][Is4؃]|(0Ek՝k`אh[pQb)@Il *2|]C`2pGNJȌ'-1_ON _@?%R^ N_}v+xr$9S|VJaf@0]i),\t2ُ"D#[nVKS KSQ&\.Ht2f)K[iڴ؇B7kEa1~WaI[ %&"Í;FDx7hqw4u[tQ9rrɫ5%u x⌠Mk ;TقNa+woݠb,VkkV(|*8%+:}4p8Nq?z%A%_0pJemHFtəHFLPxe| ~@@J iaq0O LmǪ-m-2޸QWd~bx`' +H2AúZ[cF1v<χ_;.Ғ" n !Ê{F@s0k䥴m9 j/SV݁W)Km1'x_n%aU,G0C\Dzy{W=ym-dlgOU4:C e$v0J:v\Ža?گ"¤ly>OU&]a^ )yLZE A?^гiݻWKObXpHq"`-JZj`%S\>0ȥ؃GK7%y2F=-I}E(diTB0 {Eh=ˊ"{ <.lekj6>Iht!d 5E؂(Vç~Iox.$8ZIMF^:##KnYG!U$oT)CB *q&"3J̛jVyצ<\NOl,pS!|.$.O<қ ]h!ay &f* pX|/jzت8hZn\M\!Qe!&^EPma3掇}~,b4Uv”5+yj\nt `-dU\o.&!T5 pUh^ө; #'<9{&$vnv~3㼘)XkZ7(umx'y`3h5ϝj9[-Oe.V8<7= ؉5eItЏ:8 x!rjS@NCli!/e q3~H$ǚz7vs`.i>F`Vx̒2;a|N;GLU^F g_5wxai8 >&p]-\~R"sQC vVNJ'!L0v #T~3D+B x5#NbŽUL,. 
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[1724326876]: ---"Objects listed" error: 14921ms (13:03:40.615)
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[1724326876]: [14.921191487s] [14.921191487s] END
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.615853 4816 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.615991 4816 trace.go:236] Trace[162302760]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (16-Feb-2026 13:03:25.608) (total time: 15007ms):
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[162302760]: ---"Objects listed" error: 15006ms (13:03:40.615)
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[162302760]: [15.007024314s] [15.007024314s] END
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.616007 4816 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.616505 4816 trace.go:236] Trace[283278010]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (16-Feb-2026 13:03:29.999) (total time: 10617ms):
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[283278010]: ---"Objects listed" error: 10617ms (13:03:40.616)
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[283278010]: [10.617185722s] [10.617185722s] END
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.616527 4816 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.622093 4816 trace.go:236] Trace[623409156]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (16-Feb-2026 13:03:28.476) (total time: 12145ms):
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[623409156]: ---"Objects listed" error: 12144ms (13:03:40.621)
Feb 16 13:03:40 crc kubenswrapper[4816]: Trace[623409156]: [12.145092594s] [12.145092594s] END
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.622119 4816 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Feb 16 13:03:40 crc kubenswrapper[4816]: I0216 13:03:40.627853 4816 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.324137 4816 apiserver.go:52] "Watching apiserver"
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.327696 4816 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.327978 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"]
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.328488 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.328776 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.328996 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.329161 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.329627 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.330046 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.330576 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.330583 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.330756 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.336640 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.336744 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.336764 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.336648 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.337011 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.337997 4816 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.338504 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.338826 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.338934 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.339581 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.350643 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 20:48:33.687008384 +0000 UTC Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.382636 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.398553 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414182 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414229 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414528 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414599 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414690 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414751 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414804 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414855 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414905 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.414954 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415006 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415057 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415104 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415107 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415152 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415201 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415251 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415265 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415302 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415384 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415409 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415434 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415368 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415460 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415483 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415503 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415524 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415544 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415574 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415596 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415617 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415639 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415697 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415728 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415771 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415791 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415810 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415829 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415848 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415870 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415889 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415910 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415899 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" 
(OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415929 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415948 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415968 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.415989 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416009 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416030 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416052 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416047 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416074 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416099 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416120 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416139 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416160 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416183 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416203 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416223 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416244 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416263 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416283 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416302 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416325 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416405 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416431 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416456 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416451 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416477 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416477 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416549 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416579 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416561 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416603 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416876 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416917 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417005 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417128 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417181 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417233 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417291 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417343 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417436 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417490 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417910 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417962 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418006 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418043 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418079 4816 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418115 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418152 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418189 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418227 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418262 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418297 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418335 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418371 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418410 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418443 4816 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418478 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418513 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418547 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418589 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418624 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418692 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418734 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418777 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418830 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418886 4816 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418945 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418993 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419034 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419068 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419107 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419144 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419183 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419219 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419253 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") 
" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419291 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419328 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419367 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419406 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419446 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419483 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419519 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419559 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419594 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419632 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420703 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420779 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420834 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420877 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420914 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420951 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421005 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421050 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421090 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421127 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421164 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421202 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421284 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421398 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421470 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421525 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421563 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421603 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421638 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421710 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421751 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421790 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421833 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421868 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421903 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421948 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421986 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422023 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422062 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422101 4816 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422138 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422174 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422213 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422333 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422370 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422407 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422444 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422483 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422527 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 
13:03:41.422586 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422646 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422758 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422804 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422840 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422877 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422915 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422955 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422993 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423033 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 16 13:03:41 crc 
kubenswrapper[4816]: I0216 13:03:41.423069 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423107 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423150 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423192 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423231 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423275 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423312 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423348 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423390 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423430 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " 
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423467 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423504 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423542 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423581 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423617 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423687 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423732 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423772 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423827 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423890 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" 
(UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423955 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424006 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424043 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424083 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424123 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424166 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424203 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424275 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424322 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424364 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" 
(UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424407 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424450 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424494 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424541 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424585 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424641 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424745 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424789 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424834 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424877 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424919 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425014 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425051 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425088 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425122 4816 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425156 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425187 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425211 4816 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425235 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" 
DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416806 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426164 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426169 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426280 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416966 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426351 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.416997 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417071 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426377 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417271 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.417620 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418083 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418143 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418138 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.427534 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418279 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418342 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418601 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418739 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418746 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.418860 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419133 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.419574 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420101 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420169 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420327 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420417 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.420609 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421030 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421059 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421120 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421150 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421179 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421235 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421298 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.421286 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422298 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422783 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422917 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.422960 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423626 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423675 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423699 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.423962 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424122 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424102 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424186 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424341 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424593 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424639 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424842 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.424931 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425141 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425177 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425213 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425228 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425574 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425593 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.425971 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426124 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426126 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426420 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426543 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426587 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426628 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426757 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426776 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.426796 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.427121 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.427834 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.428537 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.428719 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.428712 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.428774 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.428821 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.428838 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.428882 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.429337 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.429398 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.429484 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.430072 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.430411 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.430623 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.431337 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.431536 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.431721 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.431891 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.432134 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.432151 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.432288 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.432709 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.433064 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.433203 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.434368 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.434428 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.434444 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.434535 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.434545 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.435002 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.435108 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.435131 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.435150 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.435392 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.435473 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.435534 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.436246 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.436319 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.436716 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.436828 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.436970 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437018 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437081 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437125 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437186 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437290 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437443 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437488 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.437567 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.438782 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.439131 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.439510 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.439707 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.439712 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.439806 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.440105 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.440863 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.442019 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.442259 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.442460 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.442788 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.442818 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.443086 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.443374 4816 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.443393 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.443576 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.443816 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.443909 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:41.942338136 +0000 UTC m=+21.269051944 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.445103 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:41.945060647 +0000 UTC m=+21.271774415 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.444847 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.446224 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:03:41.945130209 +0000 UTC m=+21.271844077 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.449974 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.459972 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.461707 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.461932 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.467893 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.468191 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.468269 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.468332 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.468377 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.468484 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:41.968458106 +0000 UTC m=+21.295171834 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.468606 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.469103 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.469877 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.470199 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.470232 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.470251 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.470324 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:41.970300084 +0000 UTC m=+21.297013822 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
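
The nestedpendingoperations.go:348 records above show the kubelet's per-volume retry gate: after a failed MountVolume.SetUp it refuses further attempts until durationBeforeRetry elapses, and the delay grows with each consecutive failure. The sketch below prints the doubling schedule implied by the 500ms value logged here; the factor of 2 and the 2m2s ceiling are assumptions taken from upstream kubelet defaults, not something this log states.

// backoff_sketch.go -- illustrative only. Prints the retry-delay schedule
// implied by "durationBeforeRetry 500ms" above, assuming exponential doubling
// with a 2m2s ceiling (assumed upstream kubelet default, not shown in the log).
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond               // starting value seen in the log
	const ceiling = 2*time.Minute + 2*time.Second // assumption, not from this log
	for attempt := 1; attempt <= 10; attempt++ {
		fmt.Printf("failure %2d -> next retry in %v\n", attempt, delay)
		delay *= 2
		if delay > ceiling {
			delay = ceiling
		}
	}
}

Under those assumptions the delay saturates after about eight consecutive failures (500ms, 1s, 2s, ..., 64s, then the ceiling), which is why a transiently missing object, such as the unregistered kube-root-ca.crt ConfigMap above, clears quickly once the object is registered.
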
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.477223 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.478536 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.479494 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.480516 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.482267 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.483786 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.482910 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status:
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.482836 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.482836 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.483960 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.485000 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.485637 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.486190 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.486869 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.487777 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.487947 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.488108 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.488407 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.489813 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.491495 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.491589 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.491724 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.492298 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.492472 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.492537 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.492936 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495169 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495245 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495276 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495452 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495509 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495519 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495606 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495682 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.495710 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.496115 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.496340 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.496601 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.497552 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.497580 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.498087 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.498178 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.498224 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.498327 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.498345 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.498840 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.498901 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.499004 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.499067 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.499133 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.499146 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.499204 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.499242 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.497999 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.501022 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.505807 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.510146 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.513146 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.513886 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.520361 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.520917 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.523290 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13" exitCode=255 Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.523835 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13"} Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.523880 4816 scope.go:117] "RemoveContainer" containerID="6bb942ea75ad6becc7f36160f8659499b179be70df98931724c8c43560aed05f" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525522 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525581 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525646 4816 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525682 4816 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" 
DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525694 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525708 4816 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525721 4816 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525733 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525744 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525756 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525768 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525780 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525792 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525802 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525813 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525825 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525837 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" 
DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525848 4816 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525875 4816 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525887 4816 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525899 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525910 4816 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525922 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525934 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.525945 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526050 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526061 4816 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526078 4816 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526090 4816 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526101 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node 
\"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526113 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526126 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526137 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526148 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526160 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526171 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526182 4816 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526192 4816 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526204 4816 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526215 4816 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526226 4816 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526237 4816 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526248 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 16 
13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526259 4816 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526270 4816 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526286 4816 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526301 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526316 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526331 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526348 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526369 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526384 4816 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526402 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526417 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526433 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526447 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526463 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526477 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526488 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526499 4816 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526510 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526522 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526538 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526554 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526569 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526586 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526604 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526615 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526627 4816 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526639 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526650 4816 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526682 4816 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526694 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526710 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526725 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526741 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526756 4816 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526772 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526788 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526803 4816 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526818 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526834 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526849 4816 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526863 4816 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526878 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526893 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526908 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526923 4816 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526935 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526947 4816 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526958 4816 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526970 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526980 4816 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.526991 4816 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527003 4816 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527014 4816 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527044 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527059 4816 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527077 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527094 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527110 4816 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527126 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527140 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527151 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527163 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527176 4816 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527192 4816 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527208 4816 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527224 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527237 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527249 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527261 4816 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527272 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527285 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527296 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527308 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527320 4816 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527331 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527342 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527354 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527366 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527378 4816 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527390 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527402 4816 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527413 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527424 4816 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527436 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527447 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527459 4816 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527471 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527482 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527493 4816 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527505 4816 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527516 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527528 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527539 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527550 4816 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527562 4816 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527574 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527588 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527599 4816 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527610 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527622 4816 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527634 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527645 4816 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527679 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527692 4816 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527704 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527715 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527727 4816 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527738 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527749 4816 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527762 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527776 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527788 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527800 4816 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527811 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527822 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527837 4816 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527850 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527861 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527874 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527886 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527897 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527908 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527919 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527931 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527942 4816 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527954 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527966 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527977 4816 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527990 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528001 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528013 4816 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") 
on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528024 4816 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528036 4816 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528048 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528062 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528078 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528090 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528103 4816 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528116 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527973 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528130 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.527568 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528047 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528161 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528175 4816 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528184 4816 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.528193 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.530297 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/
\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"
cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.540884 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.549507 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.558680 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.566923 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.580576 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.581303 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.581788 4816 scope.go:117] "RemoveContainer" containerID="b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13" Feb 16 13:03:41 crc kubenswrapper[4816]: E0216 13:03:41.582093 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.591918 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.606215 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.606388 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.612318 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.619825 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.628955 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.644528 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.656635 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.657756 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.669078 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.675200 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.681200 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.683469 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.687487 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.690469 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.691967 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: W0216 13:03:41.692420 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-166c45656ecf41cbb41c762da4387f2ff1339144f341b80b403fa723383c0455 WatchSource:0}: Error finding container 166c45656ecf41cbb41c762da4387f2ff1339144f341b80b403fa723383c0455: Status 404 returned error can't find the container with id 166c45656ecf41cbb41c762da4387f2ff1339144f341b80b403fa723383c0455 Feb 16 13:03:41 crc kubenswrapper[4816]: W0216 13:03:41.699111 4816 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-34f2fc3bfd5ffd1e53482016482e4a2212552ffa3992d14c5719a87293e61d69 WatchSource:0}: Error finding container 34f2fc3bfd5ffd1e53482016482e4a2212552ffa3992d14c5719a87293e61d69: Status 404 returned error can't find the container with id 34f2fc3bfd5ffd1e53482016482e4a2212552ffa3992d14c5719a87293e61d69 Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.712455 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.739079 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.752089 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.766484 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.777179 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.787542 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bb942ea75ad6becc7f36160f8659499b179be70df98931724c8c43560aed05f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:35Z\\\",\\\"message\\\":\\\"W0216 13:03:24.711049 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0216 
13:03:24.711377 1 crypto.go:601] Generating new CA for check-endpoints-signer@1771247004 cert, and key in /tmp/serving-cert-2298724244/serving-signer.crt, /tmp/serving-cert-2298724244/serving-signer.key\\\\nI0216 13:03:25.126798 1 observer_polling.go:159] Starting file observer\\\\nW0216 13:03:25.129127 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0216 13:03:25.129262 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:25.129852 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2298724244/tls.crt::/tmp/serving-cert-2298724244/tls.key\\\\\\\"\\\\nF0216 13:03:35.436637 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 
13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:41 crc kubenswrapper[4816]: I0216 13:03:41.807573 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.031995 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.032112 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.032163 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.032199 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.032237 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032397 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032422 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032442 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032511 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:43.032489321 +0000 UTC m=+22.359203089 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032727 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032763 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032777 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032787 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032800 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:03:43.032767718 +0000 UTC m=+22.359481506 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032852 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:43.03283382 +0000 UTC m=+22.359547558 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032860 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032877 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:43.032865571 +0000 UTC m=+22.359579309 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.032896 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:43.032888562 +0000 UTC m=+22.359602300 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.351771 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 21:00:15.766138048 +0000 UTC Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.528501 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9"} Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.528554 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab"} Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.528576 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"166c45656ecf41cbb41c762da4387f2ff1339144f341b80b403fa723383c0455"} Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.530910 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e"} Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.531017 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e2a65c84478b5131b2a95a33c4fe180b8a7d0c91299ec4c1e4db7acab982121c"} Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.533349 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.536855 4816 scope.go:117] "RemoveContainer" containerID="b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13" Feb 16 13:03:42 crc kubenswrapper[4816]: E0216 13:03:42.536990 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.537583 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"34f2fc3bfd5ffd1e53482016482e4a2212552ffa3992d14c5719a87293e61d69"} Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 
13:03:42.549497 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.572285 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.589305 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.608701 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.619841 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.632380 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.647648 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bb942ea75ad6becc7f36160f8659499b179be70df98931724c8c43560aed05f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:35Z\\\",\\\"message\\\":\\\"W0216 13:03:24.711049 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0216 
13:03:24.711377 1 crypto.go:601] Generating new CA for check-endpoints-signer@1771247004 cert, and key in /tmp/serving-cert-2298724244/serving-signer.crt, /tmp/serving-cert-2298724244/serving-signer.key\\\\nI0216 13:03:25.126798 1 observer_polling.go:159] Starting file observer\\\\nW0216 13:03:25.129127 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0216 13:03:25.129262 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:25.129852 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2298724244/tls.crt::/tmp/serving-cert-2298724244/tls.key\\\\\\\"\\\\nF0216 13:03:35.436637 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 
13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.660192 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.684418 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.704967 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.718287 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.739131 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.757357 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.777237 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.794301 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.813863 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.831901 4816 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:42 crc kubenswrapper[4816]: I0216 13:03:42.847273 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:42Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.040294 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.040390 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.040428 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 
13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.040464 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.040503 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.040683 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.040837 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.040909 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:45.040737471 +0000 UTC m=+24.367451229 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.040941 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.040971 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.040983 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.041006 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:45.040928447 +0000 UTC m=+24.367642215 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.041104 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.041177 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.041238 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.041374 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:03:45.041355508 +0000 UTC m=+24.368069236 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.041477 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:45.04146538 +0000 UTC m=+24.368179198 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.041497 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:45.041490421 +0000 UTC m=+24.368204249 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.352098 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 22:15:38.572041485 +0000 UTC Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.398440 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.398856 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.398697 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.399107 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.398564 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.399347 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.403867 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.404408 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.405277 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.405905 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.406814 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.407426 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.408058 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.408565 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.409191 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.409729 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.410217 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.413162 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.413635 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.414247 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.415155 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.415824 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.416835 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.417272 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.417816 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.418864 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.419321 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.420255 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.420810 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.422744 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.423235 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.424330 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.425439 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.425986 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.427172 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.427700 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.430274 4816 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.430371 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.431985 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.433993 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.434905 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.436712 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.437554 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.439690 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.440375 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.441112 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.441647 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.442277 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.443022 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.443668 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.445225 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.445882 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.446921 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.447922 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.448852 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.449275 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.450130 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.450922 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.451592 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.452677 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 16 13:03:43 crc kubenswrapper[4816]: I0216 13:03:43.540927 4816 scope.go:117] "RemoveContainer" containerID="b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13" Feb 16 13:03:43 crc kubenswrapper[4816]: E0216 13:03:43.541253 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 16 13:03:44 crc kubenswrapper[4816]: I0216 13:03:44.353028 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:48:01.849558965 +0000 UTC Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.058001 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.058102 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.058149 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.058197 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058307 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058361 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058317 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:03:49.058274318 +0000 UTC m=+28.384988086 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058385 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058461 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058494 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058549 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:49.058524804 +0000 UTC m=+28.385238572 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.058540 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058596 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:49.058565355 +0000 UTC m=+28.385279203 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058639 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-02-16 13:03:49.058620176 +0000 UTC m=+28.385334084 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058729 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058800 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058827 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.058921 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:49.058894144 +0000 UTC m=+28.385607922 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.353716 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 22:27:36.697100725 +0000 UTC Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.398292 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.398371 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.398475 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.398541 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.398779 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:45 crc kubenswrapper[4816]: E0216 13:03:45.398994 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.491154 4816 csr.go:261] certificate signing request csr-shmmb is approved, waiting to be issued Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.547636 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a"} Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.563188 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.594341 4816 csr.go:257] certificate signing request csr-shmmb is issued Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.596995 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.636702 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.663879 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.677291 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.698618 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.714375 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.729265 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.740853 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.959482 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-r5z5t"] Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.959891 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.961557 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.961998 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.966873 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 16 13:03:45 crc kubenswrapper[4816]: I0216 13:03:45.985940 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:0
3:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.029396 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.044292 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.058025 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.066098 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft75l\" (UniqueName: \"kubernetes.io/projected/8ca840ef-b22e-486a-8720-a7886da10917-kube-api-access-ft75l\") pod \"node-resolver-r5z5t\" (UID: \"8ca840ef-b22e-486a-8720-a7886da10917\") " pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.066157 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8ca840ef-b22e-486a-8720-a7886da10917-hosts-file\") pod \"node-resolver-r5z5t\" (UID: \"8ca840ef-b22e-486a-8720-a7886da10917\") " pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.083083 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.106801 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.150187 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.167456 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8ca840ef-b22e-486a-8720-a7886da10917-hosts-file\") pod \"node-resolver-r5z5t\" (UID: \"8ca840ef-b22e-486a-8720-a7886da10917\") " pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.167505 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft75l\" (UniqueName: \"kubernetes.io/projected/8ca840ef-b22e-486a-8720-a7886da10917-kube-api-access-ft75l\") pod \"node-resolver-r5z5t\" (UID: \"8ca840ef-b22e-486a-8720-a7886da10917\") " pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.167610 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8ca840ef-b22e-486a-8720-a7886da10917-hosts-file\") pod 
\"node-resolver-r5z5t\" (UID: \"8ca840ef-b22e-486a-8720-a7886da10917\") " pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.182417 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.194953 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft75l\" (UniqueName: \"kubernetes.io/projected/8ca840ef-b22e-486a-8720-a7886da10917-kube-api-access-ft75l\") pod \"node-resolver-r5z5t\" (UID: \"8ca840ef-b22e-486a-8720-a7886da10917\") " pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.212335 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.247316 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.272610 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-r5z5t" Feb 16 13:03:46 crc kubenswrapper[4816]: W0216 13:03:46.285738 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ca840ef_b22e_486a_8720_a7886da10917.slice/crio-77f39b5c98e531092f16f0af2fd941e01dcaff3acbffb186c2895d976a540a77 WatchSource:0}: Error finding container 77f39b5c98e531092f16f0af2fd941e01dcaff3acbffb186c2895d976a540a77: Status 404 returned error can't find the container with id 77f39b5c98e531092f16f0af2fd941e01dcaff3acbffb186c2895d976a540a77 Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.354346 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 07:29:08.286493367 +0000 UTC Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.364630 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-69xcw"] Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.364953 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-f95nc"] Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.365088 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.365359 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.366903 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-flb2w"] Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.367356 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.369915 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.369977 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.370008 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.370146 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.370166 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.370141 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.374601 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.374649 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.374670 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.374988 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.375336 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.380090 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.388491 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.411238 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.427831 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.447555 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.459464 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.469369 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470566 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-cni-multus\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470593 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-os-release\") pod \"multus-additional-cni-plugins-flb2w\" 
(UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470609 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcfgf\" (UniqueName: \"kubernetes.io/projected/eb19d695-8c09-42cc-bc34-940019ab38dc-kube-api-access-lcfgf\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470690 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eb19d695-8c09-42cc-bc34-940019ab38dc-rootfs\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470713 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb19d695-8c09-42cc-bc34-940019ab38dc-mcd-auth-proxy-config\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470731 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-kubelet\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470747 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-conf-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470764 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-socket-dir-parent\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470778 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb19d695-8c09-42cc-bc34-940019ab38dc-proxy-tls\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470802 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-cni-bin\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470817 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-system-cni-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470832 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2a58f937-7095-4c3c-b401-3a68ae936b86-cni-binary-copy\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470848 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-netns\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470864 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-cnibin\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470878 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-daemon-config\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470894 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-cnibin\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470957 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-os-release\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.470999 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a7f7924e-16c8-423a-99b7-b480f927bef2-cni-binary-copy\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471045 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-hostroot\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471085 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-etc-kubernetes\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471119 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdz55\" (UniqueName: \"kubernetes.io/projected/2a58f937-7095-4c3c-b401-3a68ae936b86-kube-api-access-wdz55\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471148 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfwvv\" (UniqueName: \"kubernetes.io/projected/a7f7924e-16c8-423a-99b7-b480f927bef2-kube-api-access-mfwvv\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471187 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-cni-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471214 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-tuning-conf-dir\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471246 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-k8s-cni-cncf-io\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471283 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-multus-certs\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471339 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-system-cni-dir\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.471366 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a7f7924e-16c8-423a-99b7-b480f927bef2-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: 
I0216 13:03:46.484507 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.503676 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.515533 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.529257 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.542879 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.551757 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-r5z5t" event={"ID":"8ca840ef-b22e-486a-8720-a7886da10917","Type":"ContainerStarted","Data":"dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326"} Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.551830 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-r5z5t" event={"ID":"8ca840ef-b22e-486a-8720-a7886da10917","Type":"ContainerStarted","Data":"77f39b5c98e531092f16f0af2fd941e01dcaff3acbffb186c2895d976a540a77"} Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.563390 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572717 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-tuning-conf-dir\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572757 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-k8s-cni-cncf-io\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572774 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-multus-certs\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572788 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-system-cni-dir\") pod 
\"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572808 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a7f7924e-16c8-423a-99b7-b480f927bef2-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-cni-multus\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572840 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-os-release\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572858 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcfgf\" (UniqueName: \"kubernetes.io/projected/eb19d695-8c09-42cc-bc34-940019ab38dc-kube-api-access-lcfgf\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572874 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eb19d695-8c09-42cc-bc34-940019ab38dc-rootfs\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572888 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb19d695-8c09-42cc-bc34-940019ab38dc-mcd-auth-proxy-config\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572894 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-k8s-cni-cncf-io\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572928 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-kubelet\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572924 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-multus-certs\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572972 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eb19d695-8c09-42cc-bc34-940019ab38dc-rootfs\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572953 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-cni-multus\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573001 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-system-cni-dir\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.572903 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-kubelet\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573092 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-conf-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573128 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-socket-dir-parent\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573148 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb19d695-8c09-42cc-bc34-940019ab38dc-proxy-tls\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573181 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-cni-bin\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573204 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-system-cni-dir\") pod \"multus-69xcw\" (UID: 
\"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573223 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2a58f937-7095-4c3c-b401-3a68ae936b86-cni-binary-copy\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573245 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-netns\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573268 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-cnibin\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573293 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-daemon-config\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573310 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-tuning-conf-dir\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573365 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-cnibin\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573381 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-socket-dir-parent\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573423 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-system-cni-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573481 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-var-lib-cni-bin\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573519 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-cnibin\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573523 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb19d695-8c09-42cc-bc34-940019ab38dc-mcd-auth-proxy-config\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573561 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-host-run-netns\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573292 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a7f7924e-16c8-423a-99b7-b480f927bef2-os-release\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573622 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-conf-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574012 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a7f7924e-16c8-423a-99b7-b480f927bef2-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.573316 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-cnibin\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574118 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-os-release\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574143 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a7f7924e-16c8-423a-99b7-b480f927bef2-cni-binary-copy\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574170 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-daemon-config\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574178 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-hostroot\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574185 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2a58f937-7095-4c3c-b401-3a68ae936b86-cni-binary-copy\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574208 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-etc-kubernetes\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574225 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdz55\" (UniqueName: \"kubernetes.io/projected/2a58f937-7095-4c3c-b401-3a68ae936b86-kube-api-access-wdz55\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574237 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-etc-kubernetes\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574241 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfwvv\" (UniqueName: \"kubernetes.io/projected/a7f7924e-16c8-423a-99b7-b480f927bef2-kube-api-access-mfwvv\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574274 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-cni-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574281 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-os-release\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574448 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-multus-cni-dir\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 
13:03:46.574716 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a7f7924e-16c8-423a-99b7-b480f927bef2-cni-binary-copy\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.574769 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2a58f937-7095-4c3c-b401-3a68ae936b86-hostroot\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.576448 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb19d695-8c09-42cc-bc34-940019ab38dc-proxy-tls\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.577582 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.589464 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.589832 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfwvv\" (UniqueName: \"kubernetes.io/projected/a7f7924e-16c8-423a-99b7-b480f927bef2-kube-api-access-mfwvv\") pod \"multus-additional-cni-plugins-flb2w\" (UID: \"a7f7924e-16c8-423a-99b7-b480f927bef2\") " pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.593820 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcfgf\" (UniqueName: \"kubernetes.io/projected/eb19d695-8c09-42cc-bc34-940019ab38dc-kube-api-access-lcfgf\") pod \"machine-config-daemon-f95nc\" (UID: \"eb19d695-8c09-42cc-bc34-940019ab38dc\") " pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.594987 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdz55\" (UniqueName: \"kubernetes.io/projected/2a58f937-7095-4c3c-b401-3a68ae936b86-kube-api-access-wdz55\") pod \"multus-69xcw\" (UID: \"2a58f937-7095-4c3c-b401-3a68ae936b86\") " pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.595693 4816 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-16 12:58:45 +0000 UTC, rotation deadline is 2026-11-28 02:15:48.893295928 +0000 UTC Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.595747 4816 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6829h12m2.297550883s for next certificate rotation Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.602550 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.620148 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.631742 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.642739 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.651841 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.664351 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.685045 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.687132 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-69xcw" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.695510 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:03:46 crc kubenswrapper[4816]: W0216 13:03:46.698937 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a58f937_7095_4c3c_b401_3a68ae936b86.slice/crio-f8b5b40f58fc71d8a7de99225fc901ba379006e9e206193d0d41d7b49d01d86e WatchSource:0}: Error finding container f8b5b40f58fc71d8a7de99225fc901ba379006e9e206193d0d41d7b49d01d86e: Status 404 returned error can't find the container with id f8b5b40f58fc71d8a7de99225fc901ba379006e9e206193d0d41d7b49d01d86e Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.700339 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-flb2w" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.710775 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\
\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: W0216 13:03:46.712400 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb19d695_8c09_42cc_bc34_940019ab38dc.slice/crio-ef17edc223c0f0bf77b34d3b91e17b52f2afa340c069fcadeee72408f169e703 WatchSource:0}: Error finding container ef17edc223c0f0bf77b34d3b91e17b52f2afa340c069fcadeee72408f169e703: Status 404 returned error can't find the container with id ef17edc223c0f0bf77b34d3b91e17b52f2afa340c069fcadeee72408f169e703 Feb 16 13:03:46 crc kubenswrapper[4816]: W0216 13:03:46.718947 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7f7924e_16c8_423a_99b7_b480f927bef2.slice/crio-de5031cdda6b057b26ba24cf9e304415ea04762dd1c0eac818cc972f8e8350a2 WatchSource:0}: Error finding container de5031cdda6b057b26ba24cf9e304415ea04762dd1c0eac818cc972f8e8350a2: Status 404 returned error can't find the container with id de5031cdda6b057b26ba24cf9e304415ea04762dd1c0eac818cc972f8e8350a2 Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.756236 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.795320 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s2hth"] Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.796132 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.805203 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.805379 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.805467 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.805592 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.805626 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.805797 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.805940 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.813359 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.842241 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.854498 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.875897 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.891018 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.915023 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.927059 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.940101 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.954279 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.968083 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.979969 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980006 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980028 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-script-lib\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980052 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc5ls\" (UniqueName: \"kubernetes.io/projected/0d1c53ef-b268-431b-bdb8-49f45d0715f8-kube-api-access-dc5ls\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980068 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-kubelet\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980081 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-systemd\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980094 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-ovn\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980106 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-config\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980120 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-env-overrides\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980141 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovn-node-metrics-cert\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980155 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-etc-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980175 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-slash\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980191 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-ovn-kubernetes\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980206 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-bin\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980228 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-var-lib-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980284 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-systemd-units\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980301 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-netns\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980317 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-netd\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980407 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-node-log\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.980451 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-log-socket\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.982497 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:46 crc kubenswrapper[4816]: I0216 13:03:46.993427 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.004627 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.014837 4816 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.016522 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.016560 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.016572 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.016690 4816 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.017006 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.022895 4816 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.023213 4816 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.024394 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.024439 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.024449 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.024465 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.024476 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.028084 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.041228 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.045809 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.045850 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.045861 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.045878 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.045890 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.058011 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.061749 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.061797 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.061814 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.061835 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.061855 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.077966 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.081710 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s2hth\" 
(UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.081845 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.081868 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc5ls\" (UniqueName: \"kubernetes.io/projected/0d1c53ef-b268-431b-bdb8-49f45d0715f8-kube-api-access-dc5ls\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082169 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-kubelet\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082194 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-script-lib\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082872 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-systemd\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082945 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-systemd\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082247 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-kubelet\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082646 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082993 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.083005 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.083023 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.083034 4816 setters.go:603] "Node became 
not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.081914 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.081800 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082822 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-script-lib\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.083404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-ovn\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.082899 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-ovn\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.084637 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-config\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085112 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-env-overrides\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085069 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-config\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085463 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-env-overrides\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085495 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovn-node-metrics-cert\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085512 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-etc-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085541 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-slash\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085571 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-ovn-kubernetes\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085619 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-slash\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085627 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-etc-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085643 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-ovn-kubernetes\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085586 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-bin\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085721 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" 
(UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-systemd-units\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085796 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-var-lib-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085813 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-netns\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085816 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-bin\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085878 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-netd\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085861 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-var-lib-openvswitch\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085907 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-node-log\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085911 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-netd\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085758 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-systemd-units\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085885 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-netns\") pod \"ovnkube-node-s2hth\" (UID: 
\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.085954 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-node-log\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.086036 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-log-socket\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.086095 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-log-socket\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.090494 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovn-node-metrics-cert\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.097261 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.099645 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc5ls\" (UniqueName: \"kubernetes.io/projected/0d1c53ef-b268-431b-bdb8-49f45d0715f8-kube-api-access-dc5ls\") pod \"ovnkube-node-s2hth\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.100804 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.100950 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.101026 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.101101 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.101165 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.112718 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.113023 4816 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.114680 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.114718 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.114753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.114769 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.114780 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.115968 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.218455 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.218504 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.218516 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.218531 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.218541 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.319924 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.319996 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.320007 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.320021 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.320030 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.355354 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 02:18:02.064908765 +0000 UTC Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.398000 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.398074 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.398182 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.398220 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.398159 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:47 crc kubenswrapper[4816]: E0216 13:03:47.398357 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.421903 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.421947 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.421956 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.421972 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.421982 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.524509 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.524566 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.524579 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.524598 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.524610 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.557095 4816 generic.go:334] "Generic (PLEG): container finished" podID="a7f7924e-16c8-423a-99b7-b480f927bef2" containerID="1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d" exitCode=0 Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.557169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerDied","Data":"1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.557198 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerStarted","Data":"de5031cdda6b057b26ba24cf9e304415ea04762dd1c0eac818cc972f8e8350a2"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.559471 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.559511 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.559525 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"ef17edc223c0f0bf77b34d3b91e17b52f2afa340c069fcadeee72408f169e703"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.562898 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerStarted","Data":"06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.562955 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerStarted","Data":"f8b5b40f58fc71d8a7de99225fc901ba379006e9e206193d0d41d7b49d01d86e"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.564038 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98" exitCode=0 Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.564067 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.564109 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"59c3e964c4f8728941bb03bec2bacdf211e38bd0ac1809ddabfe840c9e6b5a55"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.576283 4816 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.595487 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.611759 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.627438 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.627476 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.627485 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.627498 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.627506 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.631061 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc 
kubenswrapper[4816]: I0216 13:03:47.650006 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"
2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.662130 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc 
kubenswrapper[4816]: I0216 13:03:47.675695 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.691457 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.706479 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.718536 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.732312 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.732377 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.732387 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.732402 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.732411 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.736324 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.747098 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.758578 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.769492 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.781390 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.792210 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.808036 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.822499 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.834941 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.834996 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.835008 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.835069 4816 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.835083 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.836269 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.849197 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.862074 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.876678 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.890682 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.908976 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.929122 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.937289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.937318 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.937325 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.937338 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.937347 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:47Z","lastTransitionTime":"2026-02-16T13:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.946436 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.974709 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:47 crc kubenswrapper[4816]: I0216 13:03:47.989628 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:47Z is after 2025-08-24T17:21:41Z"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.039179 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.039209 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.039217 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.039230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.039238 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.140933 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.140969 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.140978 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.140993 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.141002 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.243345 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.243642 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.243669 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.243684 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.243693 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.348036 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.348349 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.348442 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.348522 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.348580 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.355739 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 11:54:27.356995771 +0000 UTC
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.451455 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.451516 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.451528 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.451550 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.451564 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.553982 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.554016 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.554025 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.554040 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.554050 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.572520 4816 generic.go:334] "Generic (PLEG): container finished" podID="a7f7924e-16c8-423a-99b7-b480f927bef2" containerID="0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba" exitCode=0
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.572596 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerDied","Data":"0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.577243 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.577348 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.577430 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.577489 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.577549 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.577608 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"}
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.588713 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.604382 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.621505 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.635746 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.656383 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.656431 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.656444 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.656461 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.656473 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.656560 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.665045 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-nlv2n"] Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.665738 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.668999 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.669316 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.669637 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.671098 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.672982 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.683868 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.696065 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.709260 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.722184 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf
5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.733899 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.745584 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.754778 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.758445 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.758482 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.758491 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.758541 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.758553 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.766973 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.777534 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.799870 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.809914 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-serviceca\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.809970 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-host\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.809993 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m9vf\" (UniqueName: \"kubernetes.io/projected/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-kube-api-access-2m9vf\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.824427 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.861577 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.861677 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.861692 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.861709 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.861728 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.863136 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.877294 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.888500 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.897570 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.908439 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.910635 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-serviceca\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.910798 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-host\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.910892 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m9vf\" (UniqueName: \"kubernetes.io/projected/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-kube-api-access-2m9vf\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.910892 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-host\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.912274 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-serviceca\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.919404 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.930993 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m9vf\" (UniqueName: \"kubernetes.io/projected/aee6f17c-ce25-4b7b-86c9-65d9186d9eb9-kube-api-access-2m9vf\") pod \"node-ca-nlv2n\" (UID: \"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\") " pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.933097 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.951295 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.964440 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.964946 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.965051 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.965244 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.965356 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.965455 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:48Z","lastTransitionTime":"2026-02-16T13:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.980754 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-nlv2n" Feb 16 13:03:48 crc kubenswrapper[4816]: I0216 13:03:48.987212 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90
092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:48Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:48 crc kubenswrapper[4816]: W0216 13:03:48.991010 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaee6f17c_ce25_4b7b_86c9_65d9186d9eb9.slice/crio-d629c70ddee714cf1150aa2605458236bf03c5fc093ed4784bc016d8adcb4e18 WatchSource:0}: Error finding container d629c70ddee714cf1150aa2605458236bf03c5fc093ed4784bc016d8adcb4e18: Status 404 returned error can't find the container with id d629c70ddee714cf1150aa2605458236bf03c5fc093ed4784bc016d8adcb4e18 Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.004857 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.017105 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.067753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.067783 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.067793 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.067808 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.067819 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.113217 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.113695 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.113734 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.113762 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.113792 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.113916 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.113949 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.113964 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114013 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:57.113998328 +0000 UTC m=+36.440712056 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114351 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:03:57.114341158 +0000 UTC m=+36.441054896 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114465 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114492 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:57.114485402 +0000 UTC m=+36.441199130 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114540 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114575 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:57.114565174 +0000 UTC m=+36.441278902 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114604 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114633 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114643 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.114695 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:03:57.114684697 +0000 UTC m=+36.441398425 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.171168 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.171192 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.171200 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.171212 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.171220 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.273617 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.273665 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.273674 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.273690 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.273699 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.356506 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 00:06:30.481606815 +0000 UTC
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.376520 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.376587 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.376599 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.376616 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.376627 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.397852 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.397898 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.397935 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.397985 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.398081 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 16 13:03:49 crc kubenswrapper[4816]: E0216 13:03:49.398176 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.478867 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.478943 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.478961 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.478983 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.478998 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.580936 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.580973 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.580984 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.580999 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.581010 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.582922 4816 generic.go:334] "Generic (PLEG): container finished" podID="a7f7924e-16c8-423a-99b7-b480f927bef2" containerID="9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637" exitCode=0
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.583008 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerDied","Data":"9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.584246 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nlv2n" event={"ID":"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9","Type":"ContainerStarted","Data":"98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.584279 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nlv2n" event={"ID":"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9","Type":"ContainerStarted","Data":"d629c70ddee714cf1150aa2605458236bf03c5fc093ed4784bc016d8adcb4e18"}
Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.596309 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.612540 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.634293 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.646415 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.655775 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.672687 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert
-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.683201 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.683255 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.683268 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.683289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.683306 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.692392 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.706431 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.717383 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.729443 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.745707 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\
"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.758414 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.772328 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-
dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.782815 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.786540 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.786570 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.786578 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.786592 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.786600 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.805560 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.819072 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.831470 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.844074 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.854188 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.863936 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.873902 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.887222 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.888594 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.888622 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.888644 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.888673 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.888682 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.903274 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.919340 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.933483 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.943328 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.957695 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.975866 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.990494 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.990531 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.990542 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.990568 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:49 crc kubenswrapper[4816]: I0216 13:03:49.990579 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:49Z","lastTransitionTime":"2026-02-16T13:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.009309 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.049531 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.093541 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.093602 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.093625 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.093693 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.093716 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.196601 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.196680 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.196693 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.196711 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.196721 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.299437 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.299490 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.299512 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.299534 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.299548 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.356718 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 07:59:18.756899834 +0000 UTC
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.402500 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.402560 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.402584 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.402613 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.402635 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.505101 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.505171 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.505182 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.505207 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.505220 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.591675 4816 generic.go:334] "Generic (PLEG): container finished" podID="a7f7924e-16c8-423a-99b7-b480f927bef2" containerID="306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e" exitCode=0
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.591749 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerDied","Data":"306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e"}
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.608003 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.608052 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.608062 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.608083 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.608095 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.615831 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.638922 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.663936 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.676681 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.687320 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.708420 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.711351 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.711385 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.711396 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.711414 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.711426 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.728160 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.741167 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.754031 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.767065 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.779696 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\
"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.795313 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.806838 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.813485 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.813517 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.813527 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.813542 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:50 crc 
kubenswrapper[4816]: I0216 13:03:50.813552 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.817809 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.828347 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:50Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.916229 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.916289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.916306 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.916331 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:50 crc kubenswrapper[4816]: I0216 13:03:50.916348 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:50Z","lastTransitionTime":"2026-02-16T13:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.018395 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.018434 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.018447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.018464 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.018476 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.121382 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.121455 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.121479 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.121511 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.121534 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.161531 4816 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.230269 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.231899 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.231946 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.231977 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.232003 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.334870 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.334945 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.334956 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.334974 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.334987 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.357319 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 21:46:59.299156993 +0000 UTC Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.397773 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.397801 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.397844 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:51 crc kubenswrapper[4816]: E0216 13:03:51.398885 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:51 crc kubenswrapper[4816]: E0216 13:03:51.398999 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:51 crc kubenswrapper[4816]: E0216 13:03:51.399089 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.413597 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.429936 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.438204 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.438246 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.438255 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.438269 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.438278 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.441867 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.451741 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.463696 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.486117 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.505111 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.523257 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.540545 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.540584 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.540596 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.540609 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.540620 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.543596 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b5706
0041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.559851 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.572359 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 
2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.589922 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026
-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.598925 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.602803 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.603014 4816 generic.go:334] "Generic (PLEG): container finished" podID="a7f7924e-16c8-423a-99b7-b480f927bef2" containerID="88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d" exitCode=0 Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.603043 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerDied","Data":"88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.616454 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.631582 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-
02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.640924 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.643986 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.644032 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.644045 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.644062 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.644079 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.653307 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.665170 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.673955 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.684778 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.698747 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.711413 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.721626 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.737952 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z 
is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.746042 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.746094 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.746106 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.746127 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.746142 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.749177 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 
13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.762030 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.781875 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.818139 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.831392 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.848009 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.848048 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.848059 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.848187 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.848199 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.855706 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.950776 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.950817 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.950826 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.950839 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:51 crc kubenswrapper[4816]: I0216 13:03:51.950848 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:51Z","lastTransitionTime":"2026-02-16T13:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.053353 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.053389 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.053399 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.053415 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.053426 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.156248 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.156283 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.156293 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.156308 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.156318 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.260027 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.260097 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.260124 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.260154 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.260178 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.357810 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 13:33:43.28037241 +0000 UTC
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.362558 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.362588 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.362597 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.362610 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.362619 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.465293 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.465343 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.465358 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.465378 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.465389 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.567919 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.567960 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.567970 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.567983 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.567992 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.609576 4816 generic.go:334] "Generic (PLEG): container finished" podID="a7f7924e-16c8-423a-99b7-b480f927bef2" containerID="4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e" exitCode=0 Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.609626 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerDied","Data":"4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e"} Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.623804 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and 
discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.649342 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.664314 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.671477 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.671517 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.671535 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.671552 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.671565 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.675955 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.734970 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.750697 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.770603 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae4174
74e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.774238 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.774267 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.774276 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.774290 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.774299 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.783896 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.794675 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.805485 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.817787 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.831380 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18
f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.844207 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.855478 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.872013 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:52Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.876519 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.876547 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.876557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.876571 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.876579 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.979265 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.979312 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.979327 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.979347 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:52 crc kubenswrapper[4816]: I0216 13:03:52.979361 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:52Z","lastTransitionTime":"2026-02-16T13:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.081933 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.081988 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.082009 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.082033 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.082051 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.184147 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.184174 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.184181 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.184209 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.184220 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.287453 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.287540 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.287559 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.287579 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.287639 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.358678 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 07:44:41.98460939 +0000 UTC Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.390162 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.390244 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.390265 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.390631 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.390952 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.398373 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.398430 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:53 crc kubenswrapper[4816]: E0216 13:03:53.398531 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:53 crc kubenswrapper[4816]: E0216 13:03:53.398732 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.399235 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:53 crc kubenswrapper[4816]: E0216 13:03:53.399444 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.494004 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.494045 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.494060 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.494080 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.494095 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.596933 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.596990 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.597007 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.597034 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.597089 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.617002 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" event={"ID":"a7f7924e-16c8-423a-99b7-b480f927bef2","Type":"ContainerStarted","Data":"7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.622534 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.622958 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.623001 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.623126 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.637104 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.653871 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.654327 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.654892 4816 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.674039 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\
"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.691785 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.699445 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.699489 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.699506 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.699530 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.699541 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.713188 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.725717 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.740059 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.757762 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.769120 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.781674 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.803183 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.803245 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.803258 4816 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.803275 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.803292 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.803698 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z 
is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.821106 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.838419 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.851391 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.861401 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.873774 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.885321 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.897223 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.906419 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.906482 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.906501 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.906527 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.906545 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:53Z","lastTransitionTime":"2026-02-16T13:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.913848 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.929883 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.948690 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.966137 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:53 crc kubenswrapper[4816]: I0216 13:03:53.992804 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:53Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.009079 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.009117 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.009128 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.009144 4816 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.009159 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.015794 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:54Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.040965 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:54Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.052980 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:54Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.064176 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:54Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.077233 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:54Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.090154 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:54Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.100709 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:54Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.111747 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.111809 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.111825 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.111846 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.111858 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.215190 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.215274 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.215305 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.215344 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.215369 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.317927 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.317997 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.318013 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.318033 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.318047 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.359584 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 09:57:03.519823133 +0000 UTC Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.398994 4816 scope.go:117] "RemoveContainer" containerID="b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.422124 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.422373 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.422560 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.422738 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.422899 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.525988 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.526060 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.526072 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.526091 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.526129 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.628276 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.628303 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.628311 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.628325 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.628334 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.731844 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.732260 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.732280 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.732304 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.732321 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.834358 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.834429 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.834455 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.834486 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.834519 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.937157 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.937190 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.937202 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.937218 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:54 crc kubenswrapper[4816]: I0216 13:03:54.937230 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:54Z","lastTransitionTime":"2026-02-16T13:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.039471 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.039522 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.039533 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.039549 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.039563 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.141998 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.142074 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.142089 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.142109 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.142135 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.245213 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.245268 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.245286 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.245310 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.245327 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.348154 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.348210 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.348223 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.348246 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.348261 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.360495 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 09:33:37.405865452 +0000 UTC Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.398353 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.398422 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.398422 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:55 crc kubenswrapper[4816]: E0216 13:03:55.398489 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:55 crc kubenswrapper[4816]: E0216 13:03:55.398527 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:55 crc kubenswrapper[4816]: E0216 13:03:55.398698 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.451385 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.451457 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.451470 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.451494 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.451508 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.554860 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.554901 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.554910 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.554927 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.554936 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.631357 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.633080 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.633713 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.648320 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.657147 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.657807 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.657894 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.657973 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.658070 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.660720 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.675301 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.687888 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.702002 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.717781 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.734810 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.748357 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.760243 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.760289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.760301 4816 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.760323 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.760337 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.769877 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874
e370e4a3d1069e13008fa1aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.786430 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.807212 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.817917 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.828952 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.845075 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.856710 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:55Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.862143 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.862170 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.862179 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.862193 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.862203 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.964494 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.964537 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.964547 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.964566 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:55 crc kubenswrapper[4816]: I0216 13:03:55.964578 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:55Z","lastTransitionTime":"2026-02-16T13:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.067050 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.067130 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.067150 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.067182 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.067207 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.169427 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.169484 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.169501 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.169523 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.169570 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.272938 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.273020 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.273040 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.273065 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.273084 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.361231 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 02:31:11.061252695 +0000 UTC Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.376734 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.376907 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.376929 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.376958 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.376982 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.480377 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.480411 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.480419 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.480432 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.480442 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.583467 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.583534 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.583557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.583582 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.583599 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.638749 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/0.log" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.641807 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa" exitCode=1 Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.641907 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.643610 4816 scope.go:117] "RemoveContainer" containerID="464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.663295 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.681360 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.686621 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.686670 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.686681 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.686696 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.686705 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.700066 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.719380 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",
\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8
ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.737572 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.752309 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.767140 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.780690 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.788591 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.788640 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.788670 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.788692 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.788709 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.797727 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.816490 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.834819 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.851209 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.870192 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.892048 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.892009 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.892090 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.892270 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.892291 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.892303 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.908512 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:56Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.995409 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.995481 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.995493 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.995519 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:56 crc kubenswrapper[4816]: I0216 13:03:56.995535 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:56Z","lastTransitionTime":"2026-02-16T13:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.098639 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.098709 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.098723 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.098739 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.098751 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.117248 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.117368 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.117403 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.117432 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.117473 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117477 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: 
object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117498 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117510 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117550 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117553 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117565 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117577 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117587 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:04:13.117526461 +0000 UTC m=+52.444240189 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117672 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117702 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:13.117639294 +0000 UTC m=+52.444353022 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117733 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:13.117722736 +0000 UTC m=+52.444436464 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117792 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:13.117748127 +0000 UTC m=+52.444462045 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.117822 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:13.117813589 +0000 UTC m=+52.444527317 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.204522 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.206637 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.206647 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.206674 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.206684 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.309260 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.309300 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.309315 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.309333 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.309346 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.362263 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 20:04:00.212612064 +0000 UTC Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.366141 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.366194 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.366213 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.366237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.366257 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.378518 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 
2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.382826 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.382862 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.382873 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.382891 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.382923 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.394780 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 
2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.398135 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.398237 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.398538 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.398601 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.398753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.398777 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.398789 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.398802 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.398812 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.399259 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.399449 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.410991 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 
2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.414547 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.414610 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.414632 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.414702 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.414723 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.437843 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 
2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.446418 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.446458 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.446469 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.446487 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.446499 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.475632 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 
2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.475762 4816 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.477447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.477472 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.477479 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.477491 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.477499 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.580391 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.580435 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.580450 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.580470 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.580485 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.648584 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/1.log" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.649765 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/0.log" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.654177 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e" exitCode=1 Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.654221 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.654259 4816 scope.go:117] "RemoveContainer" containerID="464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.655417 4816 scope.go:117] "RemoveContainer" containerID="aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e" Feb 16 13:03:57 crc kubenswrapper[4816]: E0216 13:03:57.655907 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.676557 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.683203 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.683256 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.683274 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.683306 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.683324 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.695132 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.718398 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.733833 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.744580 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.760456 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.777916 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.786384 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.786443 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.786463 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.786490 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.786509 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.794125 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.804645 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.815072 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.826937 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.839075 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.851577 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.862446 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.873936 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.888944 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.888976 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.888986 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.889001 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.889013 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.991605 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.991679 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.991690 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.991706 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:57 crc kubenswrapper[4816]: I0216 13:03:57.991715 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:57Z","lastTransitionTime":"2026-02-16T13:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.095028 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.095115 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.095158 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.095196 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.095221 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.197379 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.197467 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.197493 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.197526 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.197551 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.300082 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.300130 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.300142 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.300161 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.300174 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.363250 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 20:26:50.751122528 +0000 UTC Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.402639 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.402705 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.402721 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.402740 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.402754 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.506071 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.506124 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.506138 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.506156 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.506169 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.608149 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.608213 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.608230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.608254 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.608270 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.658958 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/1.log" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.710901 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.710932 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.710944 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.710958 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.710969 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.780063 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9"] Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.780798 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.783724 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.784367 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.807895 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f5349
2f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.814897 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.814940 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.814956 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.814976 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.814997 4816 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.837645 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfac4297-e728-4f95-8336-d2830bc552b2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.837710 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfac4297-e728-4f95-8336-d2830bc552b2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.837744 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfac4297-e728-4f95-8336-d2830bc552b2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.837799 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54f9h\" (UniqueName: \"kubernetes.io/projected/bfac4297-e728-4f95-8336-d2830bc552b2-kube-api-access-54f9h\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.846303 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.867990 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.882816 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.902265 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.917375 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.919340 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.919389 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.919411 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.919443 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.919495 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:58Z","lastTransitionTime":"2026-02-16T13:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.935646 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.938851 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54f9h\" (UniqueName: \"kubernetes.io/projected/bfac4297-e728-4f95-8336-d2830bc552b2-kube-api-access-54f9h\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.938955 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfac4297-e728-4f95-8336-d2830bc552b2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc 
kubenswrapper[4816]: I0216 13:03:58.938993 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfac4297-e728-4f95-8336-d2830bc552b2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.939027 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfac4297-e728-4f95-8336-d2830bc552b2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.939993 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfac4297-e728-4f95-8336-d2830bc552b2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.940479 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfac4297-e728-4f95-8336-d2830bc552b2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.949553 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfac4297-e728-4f95-8336-d2830bc552b2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.956930 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could 
not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.970071 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54f9h\" (UniqueName: \"kubernetes.io/projected/bfac4297-e728-4f95-8336-d2830bc552b2-kube-api-access-54f9h\") pod \"ovnkube-control-plane-749d76644c-qfsj9\" (UID: \"bfac4297-e728-4f95-8336-d2830bc552b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.972631 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:58 crc kubenswrapper[4816]: I0216 13:03:58.986154 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:58Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.004317 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.023182 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.023238 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.023255 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.023280 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.023298 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.034940 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:
03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.056237 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.079232 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.099790 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.113209 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453
265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\
"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: W0216 13:03:59.121991 4816 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfac4297_e728_4f95_8336_d2830bc552b2.slice/crio-f4b65dbdceb64d0a36b960e8555828251a7c2f5cd20c73efbeb9f82dd309c0f1 WatchSource:0}: Error finding container f4b65dbdceb64d0a36b960e8555828251a7c2f5cd20c73efbeb9f82dd309c0f1: Status 404 returned error can't find the container with id f4b65dbdceb64d0a36b960e8555828251a7c2f5cd20c73efbeb9f82dd309c0f1 Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.126455 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.126506 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.126524 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.126566 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.126588 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.138173 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.229534 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.229823 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.229930 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.230064 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.230173 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.332740 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.332785 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.332797 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.332813 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.332824 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.363440 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 08:36:57.788719281 +0000 UTC Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.397742 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.397965 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:03:59 crc kubenswrapper[4816]: E0216 13:03:59.398076 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.398137 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:03:59 crc kubenswrapper[4816]: E0216 13:03:59.398306 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:03:59 crc kubenswrapper[4816]: E0216 13:03:59.398207 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.435837 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.435873 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.435889 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.435912 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.435927 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.538878 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.538918 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.538932 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.538953 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.538966 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.641998 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.642029 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.642038 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.642050 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.642059 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.667356 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" event={"ID":"bfac4297-e728-4f95-8336-d2830bc552b2","Type":"ContainerStarted","Data":"c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.667398 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" event={"ID":"bfac4297-e728-4f95-8336-d2830bc552b2","Type":"ContainerStarted","Data":"54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.667408 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" event={"ID":"bfac4297-e728-4f95-8336-d2830bc552b2","Type":"ContainerStarted","Data":"f4b65dbdceb64d0a36b960e8555828251a7c2f5cd20c73efbeb9f82dd309c0f1"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.689872 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.717389 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.730411 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.743245 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.745650 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.745741 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.745763 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.745788 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.745817 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.757764 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z"
Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.772832 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z"
Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.786611 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z"
Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.806748 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.821322 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.834646 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.848268 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.848306 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.848318 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.848334 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.848346 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.848850 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z"
Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.871988 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.884859 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.896378 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.924484 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.942437 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:59Z is after 2025-08-24T17:21:41Z" Feb 16 
13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.951344 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.951376 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.951385 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.951400 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:03:59 crc kubenswrapper[4816]: I0216 13:03:59.951411 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:03:59Z","lastTransitionTime":"2026-02-16T13:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.054200 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.054237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.054246 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.054259 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.054268 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.157461 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.157533 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.157556 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.157588 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.157611 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.258075 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-gfwts"] Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.258862 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:00 crc kubenswrapper[4816]: E0216 13:04:00.258968 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.262001 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.262058 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.262075 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.262098 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.262115 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.281932 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.297315 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.325995 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.341941 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 
13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.354609 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-766fq\" (UniqueName: \"kubernetes.io/projected/108200fc-f37f-4d80-bd46-314679989e11-kube-api-access-766fq\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.354768 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.364341 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 00:42:33.559172725 +0000 UTC Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.365324 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.365396 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.365419 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.365448 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.365469 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.366802 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.398215 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.415140 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.429248 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.445313 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.455458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-766fq\" (UniqueName: \"kubernetes.io/projected/108200fc-f37f-4d80-bd46-314679989e11-kube-api-access-766fq\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.455510 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:00 crc kubenswrapper[4816]: E0216 13:04:00.455621 4816 secret.go:188] Couldn't get secret 
openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:00 crc kubenswrapper[4816]: E0216 13:04:00.455688 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:00.95567256 +0000 UTC m=+40.282386288 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.459745 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.468728 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.468756 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.468767 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.468782 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.468792 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.475805 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.479912 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-766fq\" (UniqueName: 
\"kubernetes.io/projected/108200fc-f37f-4d80-bd46-314679989e11-kube-api-access-766fq\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.492560 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.508136 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.524181 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.533098 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.543309 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.556688 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:04:00Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.571954 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.572029 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.572050 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.572077 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.572096 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.674194 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.674234 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.674246 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.674261 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.674277 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.777146 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.777249 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.777277 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.777310 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.777334 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.880178 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.880236 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.880252 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.880277 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.880295 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.961282 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:00 crc kubenswrapper[4816]: E0216 13:04:00.961512 4816 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:00 crc kubenswrapper[4816]: E0216 13:04:00.961638 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:01.961608653 +0000 UTC m=+41.288322421 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.983825 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.983880 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.983898 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.983922 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:00 crc kubenswrapper[4816]: I0216 13:04:00.983940 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:00Z","lastTransitionTime":"2026-02-16T13:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.087435 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.087510 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.087531 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.087557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.087576 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.190322 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.190365 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.190378 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.190399 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.190413 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.293361 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.293444 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.293468 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.293499 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.293520 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.364557 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 02:11:35.521777234 +0000 UTC
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.398945 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:04:01 crc kubenswrapper[4816]: E0216 13:04:01.399202 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.400036 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:04:01 crc kubenswrapper[4816]: E0216 13:04:01.400224 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.400286 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:04:01 crc kubenswrapper[4816]: E0216 13:04:01.400480 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.405097 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.405159 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.405180 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.405209 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.405228 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.423813 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.436947 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.449569 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.461156 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.475066 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.493247 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.506357 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.508180 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:01 crc 
kubenswrapper[4816]: I0216 13:04:01.508217 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.508228 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.508244 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.508258 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.517921 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.528945 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.545813 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.557358 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 
13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.569124 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.589086 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.601807 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.610215 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.610245 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.610257 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.610271 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.610283 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.611867 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.624883 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.635864 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:01Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.712642 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.712742 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.712791 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.712811 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.712827 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.816597 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.816708 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.816745 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.816775 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.816794 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.919620 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.919698 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.919714 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.919733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.919746 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:01Z","lastTransitionTime":"2026-02-16T13:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:01 crc kubenswrapper[4816]: I0216 13:04:01.971320 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:01 crc kubenswrapper[4816]: E0216 13:04:01.971497 4816 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:01 crc kubenswrapper[4816]: E0216 13:04:01.971596 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:03.971572847 +0000 UTC m=+43.298286635 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.022761 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.022795 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.022803 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.022817 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.022826 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.125418 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.125507 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.125525 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.125543 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.125555 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.229066 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.229144 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.229166 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.229194 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.229224 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.332926 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.332980 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.333006 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.333032 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.333053 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.364911 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 14:04:38.299000204 +0000 UTC Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.398488 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:02 crc kubenswrapper[4816]: E0216 13:04:02.398713 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.436355 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.436425 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.436444 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.436469 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.436492 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.539219 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.539261 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.539292 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.539311 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.539324 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.641634 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.641710 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.641733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.641760 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.641776 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.744959 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.744995 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.745005 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.745022 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.745034 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.848331 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.848383 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.848401 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.848424 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.848443 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.951780 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.951863 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.951881 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.951905 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:02 crc kubenswrapper[4816]: I0216 13:04:02.951932 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:02Z","lastTransitionTime":"2026-02-16T13:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.056049 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.056124 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.056144 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.056174 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.056195 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.158995 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.159055 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.159072 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.159095 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.159112 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.262417 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.262467 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.262479 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.262497 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.262509 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.365138 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 21:18:29.956134415 +0000 UTC Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.366042 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.366098 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.366118 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.366142 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.366162 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.398275 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.398375 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:03 crc kubenswrapper[4816]: E0216 13:04:03.398460 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:03 crc kubenswrapper[4816]: E0216 13:04:03.398555 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.398722 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:03 crc kubenswrapper[4816]: E0216 13:04:03.398871 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.469569 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.469626 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.469644 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.469697 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.469714 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.573395 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.573469 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.573487 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.573515 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.573533 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.677007 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.677070 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.677092 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.677123 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.677144 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.779683 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.779723 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.779735 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.779752 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.779768 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.882214 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.882285 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.882299 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.882319 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.882333 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.984930 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.984985 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.985002 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.985022 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.985036 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:03Z","lastTransitionTime":"2026-02-16T13:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:03 crc kubenswrapper[4816]: I0216 13:04:03.992534 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:03 crc kubenswrapper[4816]: E0216 13:04:03.992774 4816 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:03 crc kubenswrapper[4816]: E0216 13:04:03.992875 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:07.992853722 +0000 UTC m=+47.319567460 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.087740 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.087799 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.087818 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.087842 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.087858 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.191237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.191310 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.191333 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.191362 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.191384 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.293936 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.293982 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.293998 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.294021 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.294037 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.365390 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 16:52:13.085055087 +0000 UTC Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.400705 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:04 crc kubenswrapper[4816]: E0216 13:04:04.400998 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.402325 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.402460 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.402491 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.402536 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.402561 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.506008 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.506090 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.506124 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.506154 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.506175 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.608904 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.608975 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.609011 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.609039 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.609062 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.711545 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.711615 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.712731 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.712773 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.712786 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.816179 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.816251 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.816274 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.816299 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.816316 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.919490 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.919553 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.919570 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.919595 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:04 crc kubenswrapper[4816]: I0216 13:04:04.919614 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:04Z","lastTransitionTime":"2026-02-16T13:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.022701 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.023054 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.023292 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.023559 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.023824 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.127743 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.127797 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.127813 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.127837 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.127856 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.231152 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.231216 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.231237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.231260 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.231297 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.334039 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.334084 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.334095 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.334115 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.334128 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.365889 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 21:37:01.793003797 +0000 UTC Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.398496 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.398555 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:05 crc kubenswrapper[4816]: E0216 13:04:05.398751 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.398776 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:05 crc kubenswrapper[4816]: E0216 13:04:05.398907 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:05 crc kubenswrapper[4816]: E0216 13:04:05.399080 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.438210 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.438270 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.438296 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.438329 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.438352 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.541721 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.541863 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.541888 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.541921 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.541944 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.644344 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.644410 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.644431 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.644461 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.644479 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.748453 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.748520 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.748540 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.748566 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.748584 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.851714 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.852082 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.852259 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.852403 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.852534 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.955477 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.955795 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.955952 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.956079 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:05 crc kubenswrapper[4816]: I0216 13:04:05.956188 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:05Z","lastTransitionTime":"2026-02-16T13:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.059471 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.059545 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.059566 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.059595 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.059617 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.161763 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.161797 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.161814 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.161830 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.161849 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.264535 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.264570 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.264579 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.264593 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.264602 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.366026 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 23:56:21.781581207 +0000 UTC Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.367981 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.368057 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.368085 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.368124 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.368147 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.398357 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:06 crc kubenswrapper[4816]: E0216 13:04:06.398500 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.471252 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.471327 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.471347 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.471418 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.471438 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.574151 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.574273 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.574292 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.574320 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.574340 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.681786 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.681858 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.681877 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.681904 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.681923 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.784563 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.784612 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.784629 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.784697 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.784718 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.888451 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.888504 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.888517 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.888535 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.888546 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.991596 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.991705 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.991733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.991761 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:06 crc kubenswrapper[4816]: I0216 13:04:06.991781 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:06Z","lastTransitionTime":"2026-02-16T13:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.094780 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.094841 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.094859 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.094884 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.094906 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.198064 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.198152 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.198176 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.198207 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.198229 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.301921 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.301965 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.301976 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.301991 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.302001 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.367029 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 01:28:39.851173268 +0000 UTC Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.397734 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.397812 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.397749 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.397958 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.398039 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.398147 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.404961 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.405033 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.405051 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.405075 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.405093 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.508773 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.508865 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.508888 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.508917 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.508938 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.612218 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.612306 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.612329 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.612358 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.612383 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.715544 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.715603 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.715621 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.715643 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.715711 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.819241 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.819302 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.819322 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.819352 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.819380 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.849031 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.849093 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.849112 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.849132 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.849156 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.870073 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:07Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.875301 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.875371 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.875395 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.875427 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.875448 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.899600 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:07Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.904544 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.904604 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.904626 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.904683 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.904737 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.924791 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:07Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.929113 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.929174 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.929191 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.929214 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.929231 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.949555 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:07Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.955197 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.955270 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.955296 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.955326 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.955348 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.972294 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:07Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:07 crc kubenswrapper[4816]: E0216 13:04:07.972539 4816 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.975267 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.975317 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.975329 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.975348 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:07 crc kubenswrapper[4816]: I0216 13:04:07.975360 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:07Z","lastTransitionTime":"2026-02-16T13:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.038139 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:08 crc kubenswrapper[4816]: E0216 13:04:08.038350 4816 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:08 crc kubenswrapper[4816]: E0216 13:04:08.038454 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:16.038432599 +0000 UTC m=+55.365146337 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.078127 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.078159 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.078170 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.078186 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.078198 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.181304 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.181638 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.181891 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.182108 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.182300 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.285578 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.285997 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.286144 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.286279 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.286437 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.367760 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 10:11:58.83499369 +0000 UTC Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.389236 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.389447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.389620 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.389888 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.390106 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.398308 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:08 crc kubenswrapper[4816]: E0216 13:04:08.398462 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.494071 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.494167 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.494185 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.494210 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.494227 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.596819 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.596914 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.596931 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.596955 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.596972 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.701350 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.701934 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.702618 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.702932 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.703250 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.807082 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.807354 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.807471 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.807625 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.807828 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.910333 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.910382 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.910393 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.910411 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:08 crc kubenswrapper[4816]: I0216 13:04:08.910422 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:08Z","lastTransitionTime":"2026-02-16T13:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.014622 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.015816 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.015841 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.015860 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.015872 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.119132 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.119414 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.119695 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.119908 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.120099 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.141603 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.152876 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.163541 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.179396 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.199342 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.221066 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.222822 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.222870 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:09 crc 
kubenswrapper[4816]: I0216 13:04:09.222881 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.222899 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.222912 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.236603 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.252184 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.269827 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.286929 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.301046 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.318358 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.325336 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.325370 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.325382 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.325400 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.325413 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.338030 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.360819 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.369038 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 07:18:17.717510204 +0000 UTC Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.373340 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 
13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.385304 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.397642 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.397720 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.398037 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:09 crc kubenswrapper[4816]: E0216 13:04:09.397899 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:09 crc kubenswrapper[4816]: E0216 13:04:09.398162 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:09 crc kubenswrapper[4816]: E0216 13:04:09.398214 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.418117 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.427980 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.428067 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.428096 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.428125 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.428145 4816 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.450025 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.464833 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:09Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.532008 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.532084 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.532105 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.532133 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.532153 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.635483 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.636047 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.636221 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.636384 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.636587 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.739831 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.740237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.740424 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.740582 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.740746 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.844202 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.844303 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.844323 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.844344 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.844359 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.946420 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.946462 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.946474 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.946489 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:09 crc kubenswrapper[4816]: I0216 13:04:09.946501 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:09Z","lastTransitionTime":"2026-02-16T13:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.050010 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.050131 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.050152 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.050172 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.050186 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.153043 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.153130 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.153194 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.153240 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.153260 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.256056 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.256108 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.256117 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.256130 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.256138 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.358346 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.358475 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.358496 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.358519 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.358539 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.369982 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 07:24:52.452065755 +0000 UTC
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.398373 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts"
Feb 16 13:04:10 crc kubenswrapper[4816]: E0216 13:04:10.398535 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.460863 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.460919 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.460935 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.460954 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.460967 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.554602 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.564345 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.564393 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.564408 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.564431 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.564445 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.569619 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\"
,\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.587248 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multu
s/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.604068 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"na
me\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.619515 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.632393 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.647784 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.665029 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.667167 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.667225 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.667242 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.667270 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.667288 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.684691 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.704179 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.722879 4816 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.745550 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.761796 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.772172 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.772218 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.772229 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.772246 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.772259 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.772954 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.786168 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.813286 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.828349 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.839532 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.851090 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:10Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.874678 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.874729 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.874741 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.874759 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.874773 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.976433 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.976471 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.976480 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.976495 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:10 crc kubenswrapper[4816]: I0216 13:04:10.976507 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:10Z","lastTransitionTime":"2026-02-16T13:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.079853 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.079929 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.079952 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.079982 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.080007 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.200377 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.200415 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.200424 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.200438 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.200448 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.303003 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.303042 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.303051 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.303069 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.303079 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.370617 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 22:30:56.214011087 +0000 UTC Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.398073 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.398073 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:11 crc kubenswrapper[4816]: E0216 13:04:11.398290 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.398360 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:11 crc kubenswrapper[4816]: E0216 13:04:11.399008 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:11 crc kubenswrapper[4816]: E0216 13:04:11.399109 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.399745 4816 scope.go:117] "RemoveContainer" containerID="aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.408942 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.409010 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.409034 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.409066 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.409088 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.419402 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.442311 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.463925 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.478520 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.501437 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.511303 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.511369 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.511453 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.511487 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.511509 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.531990 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:
03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.545785 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.563763 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.576617 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.605322 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464094014f2ea1f2ae4a6b12f7ee3354c190a874e370e4a3d1069e13008fa1aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:56Z\\\",\\\"message\\\":\\\"3:55.982744 6137 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.982842 6137 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:03:55.982897 6137 factory.go:656] Stopping watch factory\\\\nI0216 13:03:55.982949 6137 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:03:55.982961 6137 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983170 6137 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:03:55.983258 6137 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.970957 6137 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:03:55.983694 6137 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.616346 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.616404 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.616416 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.616432 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.616443 4816 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.630239 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.657337 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f78
14a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.684002 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.695733 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.704480 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.712564 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/1.log" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.715689 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restar
tCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.715747 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.716053 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.718049 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.718079 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.718092 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.718109 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.718122 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.726929 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.739131 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.755104 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.769235 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.802168 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.820727 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.820771 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.820781 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.820796 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.820807 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.823500 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.837310 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.858025 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.877032 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.892936 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.913858 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.922694 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.922746 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:11 crc 
kubenswrapper[4816]: I0216 13:04:11.922758 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.922777 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.922791 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:11Z","lastTransitionTime":"2026-02-16T13:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.924810 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.936416 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.948299 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.959646 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:11 crc kubenswrapper[4816]: I0216 13:04:11.977503 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.001037 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:11Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.017066 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.028808 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.028844 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.028852 4816 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.028865 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.028875 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.039953 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1
a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: 
oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.050318 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 
13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.131992 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.132026 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.132034 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.132047 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.132056 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.234524 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.234762 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.234772 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.234784 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.234793 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.337221 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.337254 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.337263 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.337280 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.337289 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.371040 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 08:23:17.221044979 +0000 UTC
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.398485 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts"
Feb 16 13:04:12 crc kubenswrapper[4816]: E0216 13:04:12.398738 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.440343 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.440382 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.440395 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.440412 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.440424 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.542793 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.542825 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.542865 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.542878 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.542886 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.645789 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.645848 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.645871 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.645895 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.645912 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.723477 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/2.log"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.724471 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/1.log"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.728268 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc" exitCode=1
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.728322 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc"}
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.728368 4816 scope.go:117] "RemoveContainer" containerID="aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.729471 4816 scope.go:117] "RemoveContainer" containerID="f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc"
Feb 16 13:04:12 crc kubenswrapper[4816]: E0216 13:04:12.729786 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8"
Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.745753 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.749303 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.749325 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.749333 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.749346 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.749354 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.766138 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:
03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.780928 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.793845 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.813522 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.828093 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.841720 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.852380 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.852427 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.852441 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.852461 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.852476 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.855779 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.871128 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.890691 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa7693d5c3a71caa16cec426736f5095887f9f377567051c34ce8cba953ad90e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:03:57Z\\\",\\\"message\\\":\\\": Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:03:57Z is after 2025-08-24T17:21:41Z]\\\\nI0216 13:03:57.569947 6292 services_controller.go:434] Service openshift-authentication/oauth-openshift retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{oauth-openshift openshift-authentication 327e9277-4a34-458b-9afd-a4d0b83d7a80 5000 0 2025-02-23 05:23:11 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:oauth-openshift] map[operator.openshift.io/spec-hash:d9e6d53076d47ab2d123d8b1ba8ec6543488d973dcc4e02349493cd1c33bce83 service.alpha.openshift.io/serving-cert-secret-name:v4-0-config-system-serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: oauth-openshift,},ClusterIP:10.217.4.222,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,He\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) 
from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.907266 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 
13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.924429 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.940734 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.957201 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.957239 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.957250 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.957271 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.957284 4816 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:12Z","lastTransitionTime":"2026-02-16T13:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.963394 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:12 crc kubenswrapper[4816]: I0216 13:04:12.982740 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.001236 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:12Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.021005 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.033193 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.059784 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.059822 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.059833 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.059849 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.059861 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.136818 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.136965 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.137013 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137075 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:04:45.137038651 +0000 UTC m=+84.463752389 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.137136 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137175 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.137189 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137195 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137244 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137256 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137259 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137270 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137300 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137363 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137323 4816 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:45.137312788 +0000 UTC m=+84.464026526 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137466 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:45.137437572 +0000 UTC m=+84.464151340 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137499 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:45.137481443 +0000 UTC m=+84.464195211 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.137534 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:45.137521784 +0000 UTC m=+84.464235542 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.162694 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.162738 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.162748 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.162766 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.162778 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.266092 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.266168 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.266188 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.266214 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.266233 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.369977 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.370063 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.370089 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.370125 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.370151 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.371386 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 22:13:12.373844688 +0000 UTC Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.397805 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.397845 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.397895 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.398036 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.398119 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.398251 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.472696 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.472730 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.472740 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.472756 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.472767 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.575548 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.575586 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.575596 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.575618 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.575629 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.678615 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.678702 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.678713 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.678732 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.678763 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.735925 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/2.log" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.740838 4816 scope.go:117] "RemoveContainer" containerID="f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc" Feb 16 13:04:13 crc kubenswrapper[4816]: E0216 13:04:13.741129 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.764177 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.781785 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.781841 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.781864 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.781893 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.781915 4816 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.789676 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.809331 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.824250 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.839898 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.858080 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.872143 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.885935 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.886013 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.886032 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.886055 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.886109 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.891310 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.910542 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.923745 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.939493 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.954470 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.971103 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.982389 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.988795 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:13 crc 
kubenswrapper[4816]: I0216 13:04:13.988863 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.988891 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.988921 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:13 crc kubenswrapper[4816]: I0216 13:04:13.988944 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:13Z","lastTransitionTime":"2026-02-16T13:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.000099 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:13Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.015172 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:14Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.035146 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:14Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.050820 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:14Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.093249 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.093300 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.093312 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.093332 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.093351 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.196958 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.197058 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.197107 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.197131 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.197150 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.300236 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.300369 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.300387 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.300410 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.300427 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.372518 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 03:01:39.805976054 +0000 UTC Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.397927 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:14 crc kubenswrapper[4816]: E0216 13:04:14.398190 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.403767 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.403818 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.403836 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.403963 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.403990 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.506607 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.506682 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.506830 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.506859 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.507107 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.610960 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.611023 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.611041 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.611070 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.611094 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.714963 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.715041 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.715058 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.715085 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.715104 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.818281 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.818331 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.818348 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.818372 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.818391 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.922191 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.922250 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.922267 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.922290 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:14 crc kubenswrapper[4816]: I0216 13:04:14.922309 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:14Z","lastTransitionTime":"2026-02-16T13:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.025862 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.025915 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.025932 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.025955 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.025972 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.128918 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.128966 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.128982 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.129005 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.129025 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.232226 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.232300 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.232338 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.232375 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.232401 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.336405 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.336490 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.336514 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.336543 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.336563 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.373522 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 05:04:06.299166631 +0000 UTC Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.398164 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.398210 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:15 crc kubenswrapper[4816]: E0216 13:04:15.398338 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.398368 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:15 crc kubenswrapper[4816]: E0216 13:04:15.398546 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:15 crc kubenswrapper[4816]: E0216 13:04:15.398651 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.440227 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.440300 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.440327 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.440358 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.440379 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.543920 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.543983 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.544005 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.544036 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.544059 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.647152 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.647223 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.647246 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.647276 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.647295 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.749789 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.749866 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.749923 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.749954 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.749971 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.853877 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.853955 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.853977 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.854007 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.854029 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.956443 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.956502 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.956518 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.956540 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:15 crc kubenswrapper[4816]: I0216 13:04:15.956557 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:15Z","lastTransitionTime":"2026-02-16T13:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.060012 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.060104 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.060129 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.060157 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.060178 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.067995 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:16 crc kubenswrapper[4816]: E0216 13:04:16.068174 4816 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:16 crc kubenswrapper[4816]: E0216 13:04:16.068281 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:04:32.068252745 +0000 UTC m=+71.394966503 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.163813 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.163921 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.163949 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.163984 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.164023 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.266790 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.266834 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.266846 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.266890 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.266902 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.369272 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.369303 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.369312 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.369329 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.369339 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.373788 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 07:11:04.90205039 +0000 UTC Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.398074 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:16 crc kubenswrapper[4816]: E0216 13:04:16.398255 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.472015 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.472074 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.472093 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.472118 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.472135 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.575439 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.575502 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.575519 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.575544 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.575746 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.678717 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.678790 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.678808 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.678831 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.678847 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.780844 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.780883 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.780896 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.780911 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.780923 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.883709 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.883750 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.883763 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.883779 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.883791 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.987311 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.987362 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.987379 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.987400 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:16 crc kubenswrapper[4816]: I0216 13:04:16.988159 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:16Z","lastTransitionTime":"2026-02-16T13:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.090728 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.090766 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.090776 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.090790 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.090806 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.192639 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.192992 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.193001 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.193012 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.193021 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.296182 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.296245 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.296262 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.296286 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.296303 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.374171 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 23:32:21.196521016 +0000 UTC Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.397730 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.397834 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:17 crc kubenswrapper[4816]: E0216 13:04:17.398024 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.398123 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:17 crc kubenswrapper[4816]: E0216 13:04:17.397850 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:17 crc kubenswrapper[4816]: E0216 13:04:17.398184 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.398504 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.398528 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.398537 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.398548 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.398557 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.500724 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.500765 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.500774 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.500787 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.500797 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.603584 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.603643 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.603690 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.603715 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.603733 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.706397 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.706456 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.706475 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.706500 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.706518 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.810230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.810275 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.810294 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.810318 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.810335 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.912993 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.913024 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.913031 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.913043 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:17 crc kubenswrapper[4816]: I0216 13:04:17.913051 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:17Z","lastTransitionTime":"2026-02-16T13:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.015935 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.015993 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.016026 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.016053 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.016074 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.054570 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.054621 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.054645 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.054710 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.054733 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: E0216 13:04:18.074709 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:18Z is after 
2025-08-24T17:21:41Z" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.079645 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.079739 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.079761 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.079788 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.079811 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: E0216 13:04:18.099623 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:18Z is after 
2025-08-24T17:21:41Z" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.103373 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.103438 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.103449 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.103469 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.103481 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: E0216 13:04:18.122617 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:18Z is after 
2025-08-24T17:21:41Z" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.129882 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.129940 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.129957 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.129979 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.129995 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: E0216 13:04:18.147898 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:18Z is after 
2025-08-24T17:21:41Z" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.152072 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.152138 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.152156 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.152181 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.152201 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: E0216 13:04:18.165300 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:18Z is after 
2025-08-24T17:21:41Z" Feb 16 13:04:18 crc kubenswrapper[4816]: E0216 13:04:18.165535 4816 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.167253 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.167293 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.167303 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.167318 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.167328 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.270329 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.270398 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.270415 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.270438 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.270456 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.373151 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.373198 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.373212 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.373230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.373243 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.375366 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 14:24:16.08403823 +0000 UTC Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.398068 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:18 crc kubenswrapper[4816]: E0216 13:04:18.398224 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.475560 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.475615 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.475628 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.475815 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.475834 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.579089 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.579138 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.579154 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.579178 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.579195 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.681997 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.682058 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.682075 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.682103 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.682120 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.784614 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.784673 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.784682 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.784695 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.784704 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.888102 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.888149 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.888164 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.888182 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.888194 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.991510 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.991571 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.991588 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.991613 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:18 crc kubenswrapper[4816]: I0216 13:04:18.991629 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:18Z","lastTransitionTime":"2026-02-16T13:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.094256 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.094314 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.094326 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.094345 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.094359 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.197237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.197300 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.197321 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.197343 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.197359 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.299897 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.299968 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.299980 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.299997 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.300011 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.375984 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 03:44:32.592644503 +0000 UTC Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.398722 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.398775 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.398815 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:19 crc kubenswrapper[4816]: E0216 13:04:19.399027 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:19 crc kubenswrapper[4816]: E0216 13:04:19.399148 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:19 crc kubenswrapper[4816]: E0216 13:04:19.399222 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.403225 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.403274 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.403291 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.403315 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.403332 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.506484 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.506557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.506574 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.506595 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.506611 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.608886 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.608951 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.608968 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.608991 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.609008 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.713865 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.713900 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.713913 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.713928 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.713940 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.816277 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.816312 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.816323 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.816338 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.816348 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.919063 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.919122 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.919131 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.919143 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:19 crc kubenswrapper[4816]: I0216 13:04:19.919152 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:19Z","lastTransitionTime":"2026-02-16T13:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.021837 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.021932 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.021958 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.022476 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.022814 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.126643 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.126744 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.126770 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.126800 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.126820 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.229957 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.230048 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.230073 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.230098 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.230117 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.333159 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.333217 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.333233 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.333256 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.333278 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.376341 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 07:20:33.522637687 +0000 UTC Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.398301 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:20 crc kubenswrapper[4816]: E0216 13:04:20.398533 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.438330 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.438734 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.439090 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.439483 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.439839 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.543516 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.543867 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.543998 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.544205 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.544346 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.646493 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.646540 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.646551 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.646567 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.646578 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.748950 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.748989 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.748999 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.749017 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.749026 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.850912 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.850946 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.850954 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.850967 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.850976 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.953367 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.953423 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.953434 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.953453 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:20 crc kubenswrapper[4816]: I0216 13:04:20.953463 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:20Z","lastTransitionTime":"2026-02-16T13:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.056692 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.056733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.056741 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.056755 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.056762 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.158925 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.159000 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.159015 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.159038 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.159055 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.262798 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.262859 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.262878 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.262903 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.262921 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.365688 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.365742 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.365758 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.365778 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.365793 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.377198 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 21:20:45.939552819 +0000 UTC Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.398236 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.398320 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:21 crc kubenswrapper[4816]: E0216 13:04:21.398428 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:21 crc kubenswrapper[4816]: E0216 13:04:21.398529 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.398604 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:21 crc kubenswrapper[4816]: E0216 13:04:21.398864 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.418156 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 
2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.433056 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.445566 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.465492 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.467742 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.467885 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.467986 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.468100 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.468206 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.478938 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.496436 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.512340 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.533627 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1
a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.544889 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.560537 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.570510 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.570563 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.570575 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.570589 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.570598 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.570716 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.593322 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.607601 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.620293 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.641292 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.655192 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.668702 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.673975 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.674074 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.674090 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.674144 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.674162 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.686295 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:21Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.776874 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.776919 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.776932 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.776948 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.776959 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.879876 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.879918 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.879927 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.879945 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.879954 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.983418 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.983462 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.983473 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.983491 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:21 crc kubenswrapper[4816]: I0216 13:04:21.983503 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:21Z","lastTransitionTime":"2026-02-16T13:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.086362 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.086447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.086475 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.086506 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.086529 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.190088 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.190218 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.190241 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.190266 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.190283 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.292767 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.292914 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.293008 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.293044 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.293079 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.377925 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 06:06:51.326046879 +0000 UTC Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.396719 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.396777 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.396787 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.396802 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.396812 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.397614 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:22 crc kubenswrapper[4816]: E0216 13:04:22.397867 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.499786 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.499841 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.499857 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.499878 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.499894 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.603460 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.603557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.603582 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.603753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.603843 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.707071 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.707142 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.707157 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.707182 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.707197 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.810840 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.810918 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.810941 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.810969 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.810991 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.913728 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.913770 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.913782 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.913800 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:22 crc kubenswrapper[4816]: I0216 13:04:22.913812 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:22Z","lastTransitionTime":"2026-02-16T13:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.016533 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.016589 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.016610 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.016634 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.016689 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.119385 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.119463 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.119489 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.119520 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.119543 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.223225 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.223279 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.223289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.223309 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.223323 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.326357 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.326405 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.326418 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.326435 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.326448 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.378750 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 07:55:56.988830675 +0000 UTC Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.398508 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.398599 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.398684 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:23 crc kubenswrapper[4816]: E0216 13:04:23.398746 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:23 crc kubenswrapper[4816]: E0216 13:04:23.398896 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:23 crc kubenswrapper[4816]: E0216 13:04:23.399163 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.429924 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.430134 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.430173 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.430211 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.430241 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.533442 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.533513 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.533536 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.533733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.533794 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.635810 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.635837 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.635845 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.635857 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.635868 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.738497 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.738563 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.738575 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.738591 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.738603 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.841822 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.841885 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.841905 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.841928 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.841945 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.944460 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.944514 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.944525 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.944546 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:23 crc kubenswrapper[4816]: I0216 13:04:23.944562 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:23Z","lastTransitionTime":"2026-02-16T13:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.046085 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.046124 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.046135 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.046148 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.046158 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.148498 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.148598 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.148616 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.148639 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.148691 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.250756 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.250791 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.250800 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.250816 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.250825 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.353295 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.353328 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.353336 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.353353 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.353362 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.378914 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 00:57:34.270278839 +0000 UTC Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.398593 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:24 crc kubenswrapper[4816]: E0216 13:04:24.398768 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.456070 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.456158 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.456184 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.456218 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.456241 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.558448 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.558489 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.558500 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.558516 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.558529 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.660308 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.660348 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.660359 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.660375 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.660387 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.765559 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.765606 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.765615 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.765631 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.765639 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.868078 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.868128 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.868139 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.868155 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.868163 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.970381 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.970432 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.970446 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.970462 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:24 crc kubenswrapper[4816]: I0216 13:04:24.970473 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:24Z","lastTransitionTime":"2026-02-16T13:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.073391 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.073451 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.073466 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.073490 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.073506 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.176536 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.176589 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.176601 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.176620 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.176635 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.279280 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.279330 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.279342 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.279357 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.279368 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.379272 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 19:15:44.126346301 +0000 UTC
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.382068 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.382100 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.382114 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.382129 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.382141 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.398550 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.398556 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.398742 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:04:25 crc kubenswrapper[4816]: E0216 13:04:25.398648 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 16 13:04:25 crc kubenswrapper[4816]: E0216 13:04:25.398797 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 16 13:04:25 crc kubenswrapper[4816]: E0216 13:04:25.398872 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.484412 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.484453 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.484464 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.484482 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.484493 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.587062 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.587163 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.587184 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.587211 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.587231 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.689622 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.689696 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.689713 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.689734 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.689752 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.791634 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.791704 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.791716 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.791733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.791743 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.894391 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.894436 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.894447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.894465 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.894478 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.997072 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.997110 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.997120 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.997134 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:25 crc kubenswrapper[4816]: I0216 13:04:25.997144 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:25Z","lastTransitionTime":"2026-02-16T13:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.099599 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.099698 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.099716 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.099732 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.099743 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.202374 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.202424 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.202437 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.202455 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.202467 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.304398 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.304436 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.304447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.304465 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.304476 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.379563 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 09:07:55.993415856 +0000 UTC
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.397911 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts"
Feb 16 13:04:26 crc kubenswrapper[4816]: E0216 13:04:26.398059 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.407134 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.407179 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.407188 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.407203 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.407214 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.509391 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.509425 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.509434 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.509448 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.509458 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.612935 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.612987 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.613005 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.613030 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.613048 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.716065 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.716114 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.716129 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.716151 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.716168 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.821733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.821770 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.821780 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.821795 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.821805 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.925075 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.925127 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.925139 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.925165 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:26 crc kubenswrapper[4816]: I0216 13:04:26.925179 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:26Z","lastTransitionTime":"2026-02-16T13:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.027608 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.027672 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.027685 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.027703 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.027716 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.130856 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.130896 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.130908 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.130924 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.130937 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.233837 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.233885 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.233893 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.233907 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.233917 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.336166 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.336200 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.336209 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.336223 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.336232 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.379976 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 06:15:41.637745245 +0000 UTC
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.398619 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.398763 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.398832 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:04:27 crc kubenswrapper[4816]: E0216 13:04:27.398840 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 16 13:04:27 crc kubenswrapper[4816]: E0216 13:04:27.399019 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 16 13:04:27 crc kubenswrapper[4816]: E0216 13:04:27.399115 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.438639 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.438701 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.438713 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.438730 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.438742 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.542140 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.542179 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.542192 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.542230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.542242 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.644392 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.644461 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.644475 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.644493 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.644505 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.747656 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.747699 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.747708 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.747721 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.747730 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.850256 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.850299 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.850311 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.850354 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.850366 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.952481 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.952532 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.952544 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.952564 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:27 crc kubenswrapper[4816]: I0216 13:04:27.952576 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:27Z","lastTransitionTime":"2026-02-16T13:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.055146 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.055189 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.055200 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.055216 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.055226 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.157021 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.157045 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.157054 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.157063 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.157071 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.258776 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.258809 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.258817 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.258830 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.258839 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.361425 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.361465 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.361473 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.361487 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.361495 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.380425 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:27:39.002500526 +0000 UTC
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.398039 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts"
Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.398456 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.398730 4816 scope.go:117] "RemoveContainer" containerID="f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc"
Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.399049 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.464292 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.464333 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.464346 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.464363 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.464377 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.519251 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.519293 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.519304 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.519320 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.519331 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.533630 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:28Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.538238 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.538370 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.538397 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.538426 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.538448 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.553281 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:28Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.557572 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.557639 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.557703 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.557736 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.557758 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.571866 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:28Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.575474 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.575527 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
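The x509 failure recorded above is the root cause of this retry storm: the node-identity webhook's serving certificate expired on 2025-08-24, while the node clock reads 2026-02-16, so every node-status patch is rejected at admission. The retry attempts at 13:04:28.571866, .595825, and .616767 carried byte-for-byte the same patch payload as the entry above and are collapsed here. A minimal Go sketch of the same validity-window check, assuming only that the endpoint 127.0.0.1:9743 named in the log is reachable from the node (an illustration, not kubelet or webhook code):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint taken from the log line above; adjust as needed.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // fetch the cert even though verification fails
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert := state.PeerCertificates[0]
	now := time.Now()
	fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n", cert.NotBefore, cert.NotAfter, now)
	switch {
	case now.After(cert.NotAfter):
		fmt.Println("certificate has expired") // the condition reported in the log
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid")
	default:
		fmt.Println("certificate is within its validity window")
	}
}

Until that certificate is rotated, every subsequent patch attempt below fails the same way.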
event="NodeHasNoDiskPressure" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.575538 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.575557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.575568 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.595825 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:28Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.599924 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.599987 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
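Alongside the webhook failure, every Ready condition in this window carries the same KubeletNotReady message: no CNI configuration file in /etc/kubernetes/cni/net.d/. A rough sketch of what such a readiness probe amounts to, assuming only that the runtime accepts the standard libcni file extensions in the directory named by the log (illustrative; not the actual CRI-O/ocicni implementation):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether confDir contains at least one CNI network
// configuration file, approximating the check behind the
// "no CNI configuration file in /etc/kubernetes/cni/net.d/" message.
func hasCNIConfig(confDir string) bool {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false // unreadable or missing directory counts as not ready
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni accepts
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the log above
	if hasCNIConfig(dir) {
		fmt.Println("NetworkReady=true")
	} else {
		fmt.Println("NetworkReady=false reason:NetworkPluginNotReady")
	}
}

The network provider normally writes that file once its daemon pods start, which cannot happen here while the expired certificate blocks the node from being admitted.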
event="NodeHasNoDiskPressure" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.600010 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.600041 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.600076 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.616767 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:28Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:28 crc kubenswrapper[4816]: E0216 13:04:28.616877 4816 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.618849 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
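The "update node status exceeds retry count" entry just above marks the kubelet abandoning the current sync round after a fixed number of patch attempts; the loop starts over on the next heartbeat interval, which is why the same block keeps repeating. A minimal sketch of that loop shape, assuming a budget of 5 attempts (the upstream kubelet constant is named nodeStatusUpdateRetry); the stubbed patch function stands in for the real API request:

package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // assumed retry budget, mirroring the kubelet constant of the same name

// patchNodeStatus stands in for the PATCH against the API server; in the log
// above it keeps failing because the admission webhook's certificate expired.
func patchNodeStatus() error {
	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io"`)
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Printf("attempt %d: Error updating node status, will retry: %v\n", i+1, err)
			continue
		}
		return nil // a single success ends the round
	}
	return errors.New("update node status exceeds retry count") // the terminal error seen in the log
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}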
event="NodeHasSufficientMemory" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.618888 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.618900 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.618917 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.618929 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.721619 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.721696 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.721714 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.721734 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.721749 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.823213 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.823264 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.823281 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.823301 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.823317 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.925916 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.925968 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.925986 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.926009 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:28 crc kubenswrapper[4816]: I0216 13:04:28.926026 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:28Z","lastTransitionTime":"2026-02-16T13:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.028457 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.028533 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.028554 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.028583 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.028605 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.131366 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.131421 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.131439 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.131464 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.131486 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.234593 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.234649 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.234711 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.234739 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.234757 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.338362 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.338459 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.338517 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.338617 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.338694 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.381388 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 13:16:01.401531977 +0000 UTC Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.397985 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.398097 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.397989 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:29 crc kubenswrapper[4816]: E0216 13:04:29.398177 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:29 crc kubenswrapper[4816]: E0216 13:04:29.398269 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:29 crc kubenswrapper[4816]: E0216 13:04:29.398406 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.442442 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.442488 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.442500 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.442519 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.442533 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.545459 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.545494 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.545503 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.545515 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.545560 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.648162 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.648193 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.648201 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.648214 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.648222 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.750210 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.750260 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.750273 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.750289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.750313 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.853301 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.853359 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.853376 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.853400 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.853417 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.956389 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.956483 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.956544 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.956573 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:29 crc kubenswrapper[4816]: I0216 13:04:29.956646 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:29Z","lastTransitionTime":"2026-02-16T13:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.060580 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.060651 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.060682 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.060702 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.060720 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.163714 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.163763 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.163781 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.163803 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.163820 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.266671 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.266705 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.266715 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.266729 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.266742 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.369637 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.369721 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.369737 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.369759 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.369775 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.381872 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 01:31:48.604625228 +0000 UTC Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.398538 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:30 crc kubenswrapper[4816]: E0216 13:04:30.398786 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.472744 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.472799 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.472815 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.472834 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.472849 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.574881 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.574946 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.574962 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.574984 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.574998 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.678286 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.678333 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.678562 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.678582 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.678928 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.784776 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.784823 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.784834 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.784848 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.784857 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.887044 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.887351 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.887363 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.887379 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.887390 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.989734 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.989836 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.989853 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.989876 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:30 crc kubenswrapper[4816]: I0216 13:04:30.989893 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:30Z","lastTransitionTime":"2026-02-16T13:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.092328 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.092378 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.092390 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.092457 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.092473 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.195032 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.195077 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.195089 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.195105 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.195116 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.297457 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.297501 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.297511 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.297526 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.297537 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.382943 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 23:33:33.927605083 +0000 UTC Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.397760 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.397810 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:31 crc kubenswrapper[4816]: E0216 13:04:31.397888 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.397957 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:31 crc kubenswrapper[4816]: E0216 13:04:31.398180 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:31 crc kubenswrapper[4816]: E0216 13:04:31.398365 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.400150 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.400189 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.400204 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.400228 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.400266 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.410380 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a93
80066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.420846 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.430994 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.441913 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae4174
74e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.454771 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.464892 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.475098 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.487027 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.500298 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.502078 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.502118 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.502129 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.502145 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.502160 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.515502 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.558245 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.572384 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.592610 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.601796 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.604025 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.604062 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.604108 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.604128 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.604141 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.613882 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.631134 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.641577 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.649985 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:31Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.706597 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.706678 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.706694 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.706713 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.706726 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.809690 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.809753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.809762 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.809778 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.809801 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.913153 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.913209 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.913224 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.913245 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:31 crc kubenswrapper[4816]: I0216 13:04:31.913263 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:31Z","lastTransitionTime":"2026-02-16T13:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.016296 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.016364 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.016382 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.016406 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.016425 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.118980 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.119050 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.119070 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.119098 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.119118 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.136748 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:32 crc kubenswrapper[4816]: E0216 13:04:32.136918 4816 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:32 crc kubenswrapper[4816]: E0216 13:04:32.137025 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:05:04.136996865 +0000 UTC m=+103.463710623 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.222000 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.222047 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.222065 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.222088 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.222104 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.324320 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.324368 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.324382 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.324400 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.324414 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.383330 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 13:55:50.243397141 +0000 UTC Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.397701 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:32 crc kubenswrapper[4816]: E0216 13:04:32.397873 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.427622 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.427699 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.427714 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.427731 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.427742 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.530675 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.530723 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.530735 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.530753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.530767 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.635864 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.636611 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.636863 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.637380 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.637526 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.740725 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.740775 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.740785 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.740798 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.740807 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.842943 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.843002 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.843019 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.843107 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.843128 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.944846 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.945088 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.945170 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.945236 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:32 crc kubenswrapper[4816]: I0216 13:04:32.945293 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:32Z","lastTransitionTime":"2026-02-16T13:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.047600 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.047793 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.047875 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.047933 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.047995 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.149630 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.149743 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.149759 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.149781 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.149798 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.252323 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.252569 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.252633 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.252733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.252795 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.355321 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.355887 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.356072 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.356179 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.356263 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.384376 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 23:38:38.577842251 +0000 UTC Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.398470 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:33 crc kubenswrapper[4816]: E0216 13:04:33.398590 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.398470 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:33 crc kubenswrapper[4816]: E0216 13:04:33.398692 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.398875 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:33 crc kubenswrapper[4816]: E0216 13:04:33.399034 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.458815 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.458847 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.458856 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.458871 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.458881 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.561237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.561283 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.561297 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.561313 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.561326 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.663673 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.663708 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.663718 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.663731 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.663742 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.765894 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.766133 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.766201 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.766264 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.766329 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.796800 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/0.log" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.796842 4816 generic.go:334] "Generic (PLEG): container finished" podID="2a58f937-7095-4c3c-b401-3a68ae936b86" containerID="06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191" exitCode=1 Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.796864 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerDied","Data":"06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.797182 4816 scope.go:117] "RemoveContainer" containerID="06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.816949 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1
a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.830263 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.843898 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.858167 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.867943 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.867983 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.867995 4816 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.868011 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.868024 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.876217 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/
lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.890513 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.900695 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.919632 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc47827
4c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.932614 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.945399 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.958577 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.970088 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.970133 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.970145 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.970161 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.970171 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:33Z","lastTransitionTime":"2026-02-16T13:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.971620 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.981868 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:33 crc kubenswrapper[4816]: I0216 13:04:33.994013 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"2026-02-16T13:03:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843\\\\n2026-02-16T13:03:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843 to /host/opt/cni/bin/\\\\n2026-02-16T13:03:48Z [verbose] multus-daemon started\\\\n2026-02-16T13:03:48Z [verbose] Readiness Indicator file check\\\\n2026-02-16T13:04:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:33Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.012590 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.025019 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.036383 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.048327 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.072080 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.072108 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.072116 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.072131 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.072142 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.174745 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.174785 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.174795 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.174809 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.174820 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.278100 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.278138 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.278149 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.278164 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.278175 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.384037 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.384101 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.384116 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.384214 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.384269 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.384461 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 07:59:20.665152542 +0000 UTC Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.398534 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:34 crc kubenswrapper[4816]: E0216 13:04:34.398670 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.487062 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.487090 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.487103 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.487123 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.487139 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.590116 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.590146 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.590157 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.590173 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.590185 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.692532 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.692574 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.692588 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.692603 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.692613 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.795819 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.795900 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.795936 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.795969 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.795993 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.803169 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/0.log" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.803238 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerStarted","Data":"a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.822321 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.841378 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.853675 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.864113 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.880072 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.894899 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.899850 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.899991 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.900016 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.900097 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.900217 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:34Z","lastTransitionTime":"2026-02-16T13:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.910040 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.924557 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.938874 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"2026-02-16T13:03:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843\\\\n2026-02-16T13:03:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843 to /host/opt/cni/bin/\\\\n2026-02-16T13:03:48Z [verbose] multus-daemon started\\\\n2026-02-16T13:03:48Z [verbose] Readiness Indicator file check\\\\n2026-02-16T13:04:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.953614 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.965234 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.976387 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:34 crc kubenswrapper[4816]: I0216 13:04:34.992645 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:34Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.002816 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.002854 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.002866 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.002879 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.002890 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.003180 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:35Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.021063 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:35Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.037962 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:35Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.048630 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:35Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.058523 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:35Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.105776 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.105810 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.105822 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.105837 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.105847 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.207447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.207483 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.207493 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.207508 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.207517 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.310123 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.310369 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.310522 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.310644 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.310805 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.385246 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 11:24:49.17289924 +0000 UTC Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.398828 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:35 crc kubenswrapper[4816]: E0216 13:04:35.398972 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.398985 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.399103 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:35 crc kubenswrapper[4816]: E0216 13:04:35.399184 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:35 crc kubenswrapper[4816]: E0216 13:04:35.399318 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.413796 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.413848 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.413866 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.413889 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.413908 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.517464 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.517547 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.517573 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.517603 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.517628 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.620807 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.620848 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.620857 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.620871 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.620880 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.723211 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.723278 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.723296 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.723322 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.723341 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.826297 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.826370 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.826391 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.826417 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.826435 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.929155 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.929206 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.929230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.929257 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:35 crc kubenswrapper[4816]: I0216 13:04:35.929275 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:35Z","lastTransitionTime":"2026-02-16T13:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.032469 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.032524 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.032545 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.032567 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.032588 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.136210 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.136268 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.136284 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.136308 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.136326 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.239501 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.239557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.239577 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.239600 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.239618 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.342924 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.342970 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.342986 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.343009 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.343026 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.385523 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 19:31:44.754238085 +0000 UTC Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.398097 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:36 crc kubenswrapper[4816]: E0216 13:04:36.398250 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.445825 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.445888 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.445908 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.445933 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.445950 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.548483 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.548524 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.548540 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.548561 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.548579 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.652385 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.652506 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.652533 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.652614 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.652715 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.755356 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.755407 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.755423 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.755447 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.755467 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.858011 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.858067 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.858086 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.858108 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.858124 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.962158 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.962230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.962254 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.962282 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:36 crc kubenswrapper[4816]: I0216 13:04:36.962299 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:36Z","lastTransitionTime":"2026-02-16T13:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.066240 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.066328 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.066349 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.066382 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.066405 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.169273 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.169314 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.169326 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.169343 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.169352 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.272427 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.272514 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.272535 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.272560 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.272578 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.377205 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.377276 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.377293 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.377318 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.377336 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.386631 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 22:32:35.835163773 +0000 UTC Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.398366 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.398403 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:37 crc kubenswrapper[4816]: E0216 13:04:37.398535 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:37 crc kubenswrapper[4816]: E0216 13:04:37.398754 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.398566 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:37 crc kubenswrapper[4816]: E0216 13:04:37.399116 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.480512 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.480568 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.480585 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.480609 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.480625 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.583705 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.583767 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.583783 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.583806 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.583823 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.686397 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.686431 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.686439 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.686453 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.686466 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.788333 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.788386 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.788402 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.788424 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.788440 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.891600 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.891698 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.891723 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.891753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.891779 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.995379 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.995443 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.995460 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.995484 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:37 crc kubenswrapper[4816]: I0216 13:04:37.995502 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:37Z","lastTransitionTime":"2026-02-16T13:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.097996 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.098036 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.098048 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.098065 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.098076 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.200969 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.201082 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.201102 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.201173 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.201194 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.305537 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.305612 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.305637 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.305725 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.305756 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.386831 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 00:18:23.138254883 +0000 UTC Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.398258 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:38 crc kubenswrapper[4816]: E0216 13:04:38.398411 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.408125 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.408160 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.408175 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.408193 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.408206 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.511188 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.511244 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.511263 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.511289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.511310 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.613501 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.613893 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.614101 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.614290 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.614430 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.717821 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.718230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.718546 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.718841 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.719109 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.822131 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.822193 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.822213 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.822239 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.822257 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.914394 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.914462 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.914479 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.914503 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.914522 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:38 crc kubenswrapper[4816]: E0216 13:04:38.937616 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:38Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.943280 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.943341 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.943358 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.943381 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.943398 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:38 crc kubenswrapper[4816]: E0216 13:04:38.965241 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... repeated status patch payload elided ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:38Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.970625 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.970739 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.970766 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.970799 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.970822 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:38 crc kubenswrapper[4816]: E0216 13:04:38.991365 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... repeated status patch payload elided ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:38Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.996618 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.996717 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.996736 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.996761 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:38 crc kubenswrapper[4816]: I0216 13:04:38.996779 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:38Z","lastTransitionTime":"2026-02-16T13:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:39 crc kubenswrapper[4816]: E0216 13:04:39.014402 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... repeated status patch payload elided ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:39Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.019835 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.019899 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.019918 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.019943 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.019963 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:39 crc kubenswrapper[4816]: E0216 13:04:39.039191 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... repeated status patch payload elided ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:39Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:39 crc kubenswrapper[4816]: E0216 13:04:39.039415 4816 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.045068 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
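Every patch attempt above fails for the same reason before the kubelet gives up with "update node status exceeds retry count": the serving certificate behind https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-02-16. A minimal Go sketch for confirming this from the node; only the host and port are taken from the log, everything else is illustrative (InsecureSkipVerify is set so the expired certificate can still be read for inspection):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
)

func main() {
	// Dial the webhook endpoint named in the log, skipping chain
	// verification so an expired certificate can still be fetched.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	// The leaf serving certificate is the first peer certificate.
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%v notBefore=%v notAfter=%v\n", cert.Subject, cert.NotBefore, cert.NotAfter)
}

If notAfter prints 2025-08-24T17:21:41Z, the webhook is still serving the stale certificate, and the node status patch cannot succeed until that certificate is rotated or the clock skew is resolved.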
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.045136 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.045706 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.045744 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.045770 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.150748 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.151400 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.151607 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.152241 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.153212 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.256757 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.256823 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.256843 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.256873 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.256892 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
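The Ready=False condition above repeats roughly every 100 ms with the same message: the kubelet finds no CNI configuration, so the node stays NotReady. A small Go sketch that checks the directory named in the log message; the path comes straight from the log, and the interpretation follows the kubelet's own message that the node cannot become Ready until a network plugin writes a config there:

package main

import (
	"fmt"
	"os"
)

func main() {
	// The kubelet watches this directory for a CNI network config; the
	// NetworkPluginNotReady message means it is currently empty or missing.
	entries, err := os.ReadDir("/etc/kubernetes/cni/net.d/")
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	if len(entries) == 0 {
		fmt.Println("no CNI configuration files; node stays NotReady until the network provider writes one")
		return
	}
	for _, e := range entries {
		fmt.Println(e.Name())
	}
}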
Has your network provider started?"} Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.359634 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.359720 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.359736 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.359763 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.359779 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.387931 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:44:12.075431464 +0000 UTC Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.398252 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.398333 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:39 crc kubenswrapper[4816]: E0216 13:04:39.398421 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.398465 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:39 crc kubenswrapper[4816]: E0216 13:04:39.398689 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:39 crc kubenswrapper[4816]: E0216 13:04:39.398764 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.462861 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.462938 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.462964 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.462993 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.463014 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.565623 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.565682 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.565695 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.565711 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.565724 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.670213 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.670627 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.670919 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.671129 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.671350 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.774792 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.774853 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.774864 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.774883 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.774897 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.878867 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.878920 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.878936 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.878961 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.878979 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.982121 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.982242 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.982267 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.982298 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:39 crc kubenswrapper[4816]: I0216 13:04:39.982323 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:39Z","lastTransitionTime":"2026-02-16T13:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.085332 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.085754 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.085863 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.085957 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.086051 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.189283 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.189349 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.189368 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.189398 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.189419 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.292960 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.293035 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.293060 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.293094 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.293117 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.388890 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 20:24:59.63086258 +0000 UTC Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.396384 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.396443 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.396461 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.396482 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.396498 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.398069 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:40 crc kubenswrapper[4816]: E0216 13:04:40.398336 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.499538 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.499593 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.499609 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.499635 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.499683 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.603324 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.603402 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.603426 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.603455 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.603478 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.706895 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.706950 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.706972 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.707001 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.707024 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.809902 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.809965 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.809978 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.809996 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.810010 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.913512 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.913562 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.913578 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.913600 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:40 crc kubenswrapper[4816]: I0216 13:04:40.913617 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:40Z","lastTransitionTime":"2026-02-16T13:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.016367 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.016433 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.016456 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.016486 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.016510 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.119586 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.119635 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.119646 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.119682 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.119696 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.222802 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.222874 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.222897 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.222929 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.222951 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.325284 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.325341 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.325350 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.325364 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.325373 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.389259 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 12:12:12.100410579 +0000 UTC Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.397753 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.397822 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.397754 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:41 crc kubenswrapper[4816]: E0216 13:04:41.397940 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:41 crc kubenswrapper[4816]: E0216 13:04:41.398139 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:41 crc kubenswrapper[4816]: E0216 13:04:41.398370 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.412182 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.427776 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.428531 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.428610 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.428628 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.428744 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.428762 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.449812 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.484126 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.511553 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.530958 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.531009 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.531023 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 
13:04:41.531040 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.531052 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.531570 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.545301 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.559474 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.580161 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"2026-02-16T13:03:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843\\\\n2026-02-16T13:03:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843 to /host/opt/cni/bin/\\\\n2026-02-16T13:03:48Z [verbose] multus-daemon started\\\\n2026-02-16T13:03:48Z [verbose] Readiness Indicator file check\\\\n2026-02-16T13:04:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.597452 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.610214 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.626872 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.633219 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.633249 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.633258 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.633270 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.633279 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.643882 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.662566 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.678161 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 
13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.697168 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.712244 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.735684 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.735738 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.735753 4816 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.735774 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.735790 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.735843 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1
a7101ce038385e967ce844fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:41Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.837602 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.837634 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.837642 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.837667 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.837676 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.941014 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.941328 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.941348 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.941370 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:41 crc kubenswrapper[4816]: I0216 13:04:41.941387 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:41Z","lastTransitionTime":"2026-02-16T13:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.043747 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.043803 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.043825 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.043852 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.043872 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.147546 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.147605 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.147621 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.147636 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.147645 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.250426 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.250464 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.250473 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.250488 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.250498 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.353702 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.353748 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.353760 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.353775 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.353788 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.389444 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 08:49:27.171117319 +0000 UTC Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.397864 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:42 crc kubenswrapper[4816]: E0216 13:04:42.398539 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.415605 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.456804 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.456871 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.456894 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.456923 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.456943 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.560220 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.560279 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.560297 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.560322 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.560339 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.663012 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.663066 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.663083 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.663106 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.663123 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.766443 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.766522 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.766549 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.766578 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.766601 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.869815 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.869905 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.869936 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.869966 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.869993 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.973752 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.974178 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.974388 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.974595 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:42 crc kubenswrapper[4816]: I0216 13:04:42.974821 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:42Z","lastTransitionTime":"2026-02-16T13:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.078163 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.078237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.078254 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.078278 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.078297 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.181149 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.181186 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.181213 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.181230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.181239 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.284481 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.284590 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.284616 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.284648 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.284706 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.387378 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.387441 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.387450 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.387467 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.387485 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.390578 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 17:49:40.294906162 +0000 UTC Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.398016 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.398017 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.398083 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:43 crc kubenswrapper[4816]: E0216 13:04:43.398415 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:43 crc kubenswrapper[4816]: E0216 13:04:43.398599 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.398626 4816 scope.go:117] "RemoveContainer" containerID="f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc" Feb 16 13:04:43 crc kubenswrapper[4816]: E0216 13:04:43.398650 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.489556 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.489637 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.489719 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.489754 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.489774 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.592186 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.592218 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.592226 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.592241 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.592251 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.695165 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.695224 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.695241 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.695265 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.695283 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.804047 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.804109 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.804125 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.804151 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.804168 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.834221 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/2.log" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.837239 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.837720 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.854515 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.871847 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.885452 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.922255 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.922315 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.922334 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.922356 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.922385 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:43Z","lastTransitionTime":"2026-02-16T13:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.927722 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.945352 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.961091 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"2026-02-16T13:03:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843\\\\n2026-02-16T13:03:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843 to /host/opt/cni/bin/\\\\n2026-02-16T13:03:48Z [verbose] multus-daemon started\\\\n2026-02-16T13:03:48Z [verbose] Readiness Indicator file check\\\\n2026-02-16T13:04:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.972811 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.984913 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:43 crc kubenswrapper[4816]: I0216 13:04:43.995055 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:43Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.015427 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.024800 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.024833 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.024845 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.024862 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.024875 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.029257 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.046087 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.059227 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcfe164a-55bf-4189-9193-9380765c1cce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c57fdd5d951392300116c23555a51129532c6b178080c46e40459ab3eb1b9ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.081599 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.106320 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.123927 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.127686 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.127744 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.127759 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.127781 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.127793 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.139607 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a938006
6b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.153285 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.164984 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.231633 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.231707 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.231719 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.231744 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.231760 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.334960 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.335001 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.335010 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.335026 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.335038 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.391675 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 23:53:20.386063858 +0000 UTC Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.397863 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:44 crc kubenswrapper[4816]: E0216 13:04:44.397978 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.456989 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.457074 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.457085 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.457100 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.457111 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.561599 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.561708 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.561729 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.561758 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.561777 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.664374 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.664408 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.664416 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.664431 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.664440 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.767531 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.767577 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.767590 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.767608 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.767621 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
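Meanwhile, every status patch in this window fails the same way: the pod.network-node-identity.openshift.io webhook's serving certificate is outside its validity window ("current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z"). A minimal sketch of the validity check that produces that x509 error, using only Go's standard library; the certificate file path is hypothetical, and in the real code path crypto/tls performs this comparison during chain verification rather than as a standalone step:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical on-disk copy of the webhook's serving certificate.
	pemBytes, err := os.ReadFile("webhook-cert.pem")
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Println("no PEM block found")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse:", err)
		return
	}
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		// This is the branch the kubelet keeps hitting in the log above.
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	default:
		fmt.Println("certificate valid until", cert.NotAfter)
	}
}

Because the webhook intercepts every pod status patch, a single expired serving certificate is enough to fail all of the status_manager updates recorded before and after this point.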
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.844097 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/3.log"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.845222 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/2.log"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.849209 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" exitCode=1
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.849265 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"}
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.849314 4816 scope.go:117] "RemoveContainer" containerID="f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.850627 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"
Feb 16 13:04:44 crc kubenswrapper[4816]: E0216 13:04:44.851006 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.870606 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.870702 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.870721 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.870746 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.870768 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.874756 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.887095 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcfe164a-55bf-4189-9193-9380765c1cce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c57fdd5d951392300116c23555a51129532c6b178080c46e40459ab3eb1b9ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.916474 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.938164 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.958016 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.973127 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.973172 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.973188 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.973209 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.973223 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:44Z","lastTransitionTime":"2026-02-16T13:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.975832 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a938006
6b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:44 crc kubenswrapper[4816]: I0216 13:04:44.993739 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:44Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.011038 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.030428 4816 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae4174
74e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.047728 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.062523 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.076532 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.076593 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.076616 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.076646 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.076700 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.076742 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:45 crc
kubenswrapper[4816]: I0216 13:04:45.096450 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"2026-02-16T13:03:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843\\\\n2026-02-16T13:03:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843 to /host/opt/cni/bin/\\\\n2026-02-16T13:03:48Z [verbose] multus-daemon started\\\\n2026-02-16T13:03:48Z [verbose] Readiness Indicator file check\\\\n2026-02-16T13:04:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.112395 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.133636 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.148164 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.164030 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.176867 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177058 4816 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.177019736 +0000 UTC m=+148.503733504 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.177134 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.177214 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.177302 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177326 4816 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177349 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177379 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.177382 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177398 4816 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177426 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.177398006 +0000 UTC m=+148.504111774 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177466 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.177443117 +0000 UTC m=+148.504156875 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177558 4816 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177568 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177604 4816 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177624 4816 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177685 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.177612402 +0000 UTC m=+148.504326230 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.177737 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.177719844 +0000 UTC m=+148.504433612 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.179442 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.179483 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.179495 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.179511 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.179893 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.186638 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0638546196cffd0944d2c47ac22f4330d2d08e1a7101ce038385e967ce844fc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:12Z\\\",\\\"message\\\":\\\"sions/factory.go:140\\\\nI0216 13:04:12.270266 6507 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0216 13:04:12.270460 6507 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0216 13:04:12.270548 6507 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0216 13:04:12.270921 6507 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0216 13:04:12.270960 6507 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0216 13:04:12.270969 6507 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0216 13:04:12.270992 6507 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0216 13:04:12.271018 6507 factory.go:656] Stopping watch factory\\\\nI0216 13:04:12.271046 6507 ovnkube.go:599] Stopped ovnkube\\\\nI0216 13:04:12.271076 6507 handler.go:208] Removed *v1.Node event handler 2\\\\nI0216 13:04:12.271111 6507 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0216 13:04:12.271114 6507 handler.go:208] Removed *v1.Node event handler 7\\\\nI0216 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:44Z\\\",\\\"message\\\":\\\"LB{Name:\\\\\\\"Service_openshift-marketplace/community-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"d389393c-7ba9-422c-b3f5-06e391d537d2\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/community-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.189\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0216 13:04:44.378951 6949 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: 
handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.201581 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.281880 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.281943 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.281957 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.281977 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.281995 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.385161 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.385232 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.385255 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.385282 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.385302 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.392548 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 12:11:57.512522782 +0000 UTC Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.398043 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.398104 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.398154 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.398466 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.398574 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.398351 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.488539 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.488615 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.488687 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.488719 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.488740 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.591615 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.591689 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.591707 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.591729 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.591745 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.695220 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.695265 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.695277 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.695293 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.695306 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.799007 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.799060 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.799073 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.799092 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.799104 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.855100 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/3.log" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.859600 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:04:45 crc kubenswrapper[4816]: E0216 13:04:45.859817 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.878245 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.894881 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.901961 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.902011 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.902270 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.902295 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.902312 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:45Z","lastTransitionTime":"2026-02-16T13:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.911124 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.928239 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 
2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.943327 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.958436 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.977173 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:45 crc kubenswrapper[4816]: I0216 13:04:45.991449 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:45Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.005943 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.006059 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.006082 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.006125 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.006146 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.008900 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:46 crc
kubenswrapper[4816]: I0216 13:04:46.028714 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"2026-02-16T13:03:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843\\\\n2026-02-16T13:03:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843 to /host/opt/cni/bin/\\\\n2026-02-16T13:03:48Z [verbose] multus-daemon started\\\\n2026-02-16T13:03:48Z [verbose] Readiness Indicator file check\\\\n2026-02-16T13:04:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.047483 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.064279 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.085508 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:44Z\\\",\\\"message\\\":\\\"LB{Name:\\\\\\\"Service_openshift-marketplace/community-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"d389393c-7ba9-422c-b3f5-06e391d537d2\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/community-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.189\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0216 13:04:44.378951 6949 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.100518 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.107968 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.108019 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.108031 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.108048 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.108061 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.117102 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.131259 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcfe164a-55bf-4189-9193-9380765c1cce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c57fdd5d951392300116c23555a51129532c6b178080c46e40459ab3eb1b9ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.154034 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.169509 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.180952 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:46Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.210762 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.210817 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.210828 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.210854 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.210868 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.313689 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.314079 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.314214 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.314342 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.314477 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.392790 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 11:40:33.535110292 +0000 UTC Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.398021 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:46 crc kubenswrapper[4816]: E0216 13:04:46.398160 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.417311 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.417433 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.417492 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.417559 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.417633 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.520222 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.520557 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.520733 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.520885 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.521049 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.624353 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.624440 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.624459 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.624484 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.624501 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.728314 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.728393 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.728414 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.728440 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.728459 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.831616 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.831753 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.831793 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.831871 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.831899 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.934545 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.934881 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.934993 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.935092 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:46 crc kubenswrapper[4816]: I0216 13:04:46.935178 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:46Z","lastTransitionTime":"2026-02-16T13:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.037689 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.037980 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.038228 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.038338 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.038429 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.182579 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.182626 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.182645 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.182729 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.182760 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.286116 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.286170 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.286189 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.286212 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.286229 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.388591 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.388646 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.388680 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.388701 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.388714 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.393726 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 09:28:21.295892205 +0000 UTC Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.398180 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.398263 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.398401 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:47 crc kubenswrapper[4816]: E0216 13:04:47.398510 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:47 crc kubenswrapper[4816]: E0216 13:04:47.398578 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:47 crc kubenswrapper[4816]: E0216 13:04:47.398776 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.491584 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.491701 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.491761 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.491787 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.491809 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.595404 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.595448 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.595460 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.595475 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.595486 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.698460 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.698496 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.698508 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.698524 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.698535 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.801984 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.802047 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.802063 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.802086 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.802104 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.904858 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.904978 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.905003 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.905036 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:47 crc kubenswrapper[4816]: I0216 13:04:47.905062 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:47Z","lastTransitionTime":"2026-02-16T13:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.008363 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.008453 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.008480 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.008512 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.008534 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.110757 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.110792 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.110801 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.110814 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.110824 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.213347 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.213389 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.213401 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.213419 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.213430 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.315842 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.315884 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.315896 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.315912 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.315923 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.393958 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 08:49:22.139059196 +0000 UTC Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.398168 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:48 crc kubenswrapper[4816]: E0216 13:04:48.398308 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.418185 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.418227 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.418237 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.418255 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.418265 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.521160 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.521195 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.521210 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.521229 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.521243 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.623495 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.623543 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.623558 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.623576 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.623588 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.725363 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.725401 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.725409 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.725423 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.725432 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.829039 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.829100 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.829117 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.829142 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.829159 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.932980 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.933049 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.933072 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.933102 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:48 crc kubenswrapper[4816]: I0216 13:04:48.933123 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:48Z","lastTransitionTime":"2026-02-16T13:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.036348 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.036411 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.036429 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.036452 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.036472 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.140608 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.140715 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.140742 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.140772 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.140796 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.245112 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.245593 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.245617 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.245648 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.245702 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.247684 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.247724 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.247736 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.247755 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.247767 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.268878 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.273528 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.273580 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.273597 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.273618 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.273632 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.291919 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.297791 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.297825 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.297837 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.297855 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.297870 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.317926 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.321049 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.321088 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.321099 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.321118 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.321130 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.335090 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.338715 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.338780 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.338799 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.339229 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.339290 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.354904 4816 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8161f8f3-f9d5-417b-b4d7-3e1ca8231ceb\\\",\\\"systemUUID\\\":\\\"a11dc06c-84b8-49d2-9e87-a6202bfe8ca0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:49Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.355153 4816 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.357117 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
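Each of the four retries above posts the same multi-kilobyte status payload (shown in full in the first occurrence and elided as {…} thereafter) and fails identically: the admission webhook's serving certificate expired on 2025-08-24T17:21:41Z, while the node's clock reads 2026-02-16. A minimal Go sketch like the following, run on the node, would confirm the validity window that the TLS handshake is rejecting. The endpoint address is taken from the log line; the program is purely illustrative and not part of the kubelet.

```go
// certcheck: print the validity window of the certificate presented by the
// network-node-identity webhook endpoint seen in the log (illustrative sketch).
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// InsecureSkipVerify lets us retrieve the chain even though normal
	// verification fails with "certificate has expired".
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject,
			cert.NotBefore.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}
```

With the dates from the log, this would report expired=true, matching the x509 error in every retry.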
event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.357162 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.357179 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.357200 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.357216 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.394745 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 17:15:02.674483677 +0000 UTC Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.398272 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.398651 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.398721 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.398857 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.399079 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:49 crc kubenswrapper[4816]: E0216 13:04:49.399236 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.461075 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.461121 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.461137 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.461162 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.461179 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.564312 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.564359 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.564371 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.564389 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.564401 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.667038 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.667088 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.667104 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.667125 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.667144 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.769991 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.770068 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.770094 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.770123 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.770141 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.874211 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.874278 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.874295 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.874320 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.874338 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.977967 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.978030 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.978048 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.978073 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:49 crc kubenswrapper[4816]: I0216 13:04:49.978090 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:49Z","lastTransitionTime":"2026-02-16T13:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.081466 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.081544 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.081562 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.081942 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.082164 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.185624 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.185716 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.185735 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.185759 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.185784 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.288697 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.288784 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.288809 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.288841 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.288864 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.391633 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.391714 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.391726 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.391763 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.391774 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.394919 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 14:17:10.215473288 +0000 UTC Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.398257 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:50 crc kubenswrapper[4816]: E0216 13:04:50.398462 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.494939 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.495021 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.495046 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.495076 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.495099 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.598162 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.598230 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.598247 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.598274 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.598292 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.701959 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.702008 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.702020 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.702038 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.702049 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.804965 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.805043 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.805061 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.805085 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.805103 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.913017 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.913395 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.913558 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.913780 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:50 crc kubenswrapper[4816]: I0216 13:04:50.914509 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:50Z","lastTransitionTime":"2026-02-16T13:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.017768 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.018122 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.018246 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.018392 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.018493 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.122427 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.122519 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.122536 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.122559 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.122577 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.225801 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.225936 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.225958 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.226010 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.226027 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.328590 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.328696 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.328707 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.328722 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.328733 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.395103 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 16:14:37.780330762 +0000 UTC Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.398475 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:51 crc kubenswrapper[4816]: E0216 13:04:51.398633 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.398813 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:51 crc kubenswrapper[4816]: E0216 13:04:51.398992 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.399796 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:51 crc kubenswrapper[4816]: E0216 13:04:51.399999 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.419366 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.432738 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.432805 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.432826 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.432856 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.432877 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.433066 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlv2n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aee6f17c-ce25-4b7b-86c9-65d9186d9eb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98711acf96ccacbd0040f284cfb8f0328d34cf870a91be31d93b20989dee6816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m9vf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlv2n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.452541 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84695303-3a5e-4393-9721-39e156759f00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"le observer\\\\nW0216 13:03:41.018565 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0216 13:03:41.018810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0216 13:03:41.019955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3739639842/tls.crt::/tmp/serving-cert-3739639842/tls.key\\\\\\\"\\\\nI0216 13:03:41.301104 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0216 13:03:41.308283 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0216 13:03:41.308459 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0216 13:03:41.308551 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0216 13:03:41.308622 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0216 13:03:41.321042 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0216 13:03:41.321087 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321099 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0216 13:03:41.321109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0216 13:03:41.321115 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0216 13:03:41.321122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0216 13:03:41.321128 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0216 13:03:41.321363 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0216 13:03:41.323942 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.462376 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcfe164a-55bf-4189-9193-9380765c1cce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c57fdd5d951392300116c23555a51129532c6b178080c46e40459ab3eb1b9ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ac42763639ae9438ba636a65dbb950a49e2a137e92eeb9641500a1bc45e3c92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.481787 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ef046f9-1559-462e-9cef-c76ffcc40973\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af3336dcdac99c5f24d7a2c603d0bd1bd105871bdb3ea06fce0be3ad9bb31f30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a43618480dd584cf79ec593de958d693a93828d4a1311fcd6a68a3a60a410a4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c0f628bae4bbe1b9bb262a1cb6f03de29f149639c10c2c906e6f2f7ec8ab29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71eb1c308c5e6c2f2cf434c86883f4558b8713b
64b1bc1e1c048448ebd472b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0b57c590e10b6d6bed1f60085f27598033b2be7b81a3399bdf95c063d10c859\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5843c494fc72cb37d54372c357328e7488142f1213bdcc6ee58b53fd1eacf118\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6339f030e5abc9c3331f24c8b9fb4a28fdafab3f0723ecd691e2a0fa6b4b86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b89aac4704380dd685da2dbcb9552122b0d7d16429e80af310a459411a4a78d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.501286 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efc69d85-6ec3-4a88-a99b-970e4f552369\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c223c9b772ae2daec4e12f2790956130cfac5d7076b295aba02d9c5f376cded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0000a7ab2ca1c8e239337808fb2bd8c7942de876b48f412637a0964acf045e9e\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://12da50a489526e280ce0be60b170d38cf80a2b3000b998ea7e4c40c13228a470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a90538cd8b0c2fb6a0599f8b7f92af51d870f82f6fc220610f970ae7783c47b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.514626 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.527870 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb19d695-8c09-42cc-bc34-940019ab38dc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a17fc62139deeaf84bab65c299da10da98422d8fcfe4e4bc2a398aef0d5874b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lcfgf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f95nc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.535838 4816 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.535883 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.535894 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.535908 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.535917 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.538281 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-r5z5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ca840ef-b22e-486a-8720-a7886da10917\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dad42d73cf5d66a6f117be51917b8eaa4d0019edd5572abbffc2eb55df8a3326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft75l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-r5z5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.552811 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-69xcw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a58f937-7095-4c3c-b401-3a68ae936b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:33Z\\\",\\\"message\\\":\\\"2026-02-16T13:03:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843\\\\n2026-02-16T13:03:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8ce57852-0091-4f50-9948-0919e0038843 to /host/opt/cni/bin/\\\\n2026-02-16T13:03:48Z [verbose] multus-daemon started\\\\n2026-02-16T13:03:48Z [verbose] Readiness Indicator file check\\\\n2026-02-16T13:04:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:04:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wdz55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-69xcw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.572043 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-flb2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7f7924e-16c8-423a-99b7-b480f927bef2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7087918900b7754e88995e9dc8f4eda74f9d57eee474193b2067da315aae1b6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b3618137c62ca7df9d16578f5e431fed7df37d83faadb86ff6b973886260e8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ce4f85809db96a3e59d5ad4763b9fe0fdb491e7fd4f64233ddf638187e707ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f649c160cb300646c9c359b4a5dd53fb5cbe3dcfb6c80cc1703ff4fb945a637\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://306c8bfc6cd3e6f9926fd6938e33f0c10634dd797a18f44e0174b3a3294d218e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88936a34704970dc4447b21bd052912b554b363374e58945d7ebef2b1a9b256d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f54a8364ea8948fe9d2713cbee082b102a986685431e32df7869b3191b6452e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mfwvv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-flb2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.583513 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gfwts" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108200fc-f37f-4d80-bd46-314679989e11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:04:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-766fq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:04:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gfwts\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.596375 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c32212a-65e5-459a-a26d-ede1aeacbae5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c21d2f6faad37688a19b1438e8472d4144f57f71b74b925f7c192156352ca86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd0a02431799bb835e8e61e78f089fd6fd870bc0978b9c4851bb744438e2bba3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70acb6760d3188eaaf2c0ae417474e60265b8206277be0aee026a6f4515792dc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:21Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.613207 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.630422 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce683cc1d0b0e31fe495ddb264fcfc32a518b9e5cc306b436b0176598746c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.638483 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.638555 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.638573 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.638602 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.638620 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.644925 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfac4297-e728-4f95-8336-d2830bc552b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54bff10fea44dc5a0d34bfc82fa20299987541f9b1eb39326efe8bcebeaa1ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c26ab44776cb576b1a226c3e82796313756e81604c03a01467908860183a9c1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54f9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qfsj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.661104 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7600a254462472c70b094e0f64fa52ab21bfe98aa72f998f095f9f5079dbf80e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.679175 4816 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db453c2dbca15049da6aaf30da312bcf08d136deccf3e03a56c2a095a088feb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bdc1ffc3cb668a1ff87f3677f3bcbb5d4ba21bbec7d203793041a6b9dcc9ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z" Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.707449 4816 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d1c53ef-b268-431b-bdb8-49f45d0715f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-16T13:03:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-16T13:04:44Z\\\",\\\"message\\\":\\\"LB{Name:\\\\\\\"Service_openshift-marketplace/community-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"d389393c-7ba9-422c-b3f5-06e391d537d2\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/community-operators_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/community-operators\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.189\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0216 13:04:44.378951 6949 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-16T13:04:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-16T13:03:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-16T13:03:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-16T13:03:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dc5ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-16T13:03:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-s2hth\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-16T13:04:51Z is after 2025-08-24T17:21:41Z"
Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.741874 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.741946 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.741971 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.741996 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:51 crc kubenswrapper[4816]: I0216 13:04:51.742017 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:51Z","lastTransitionTime":"2026-02-16T13:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the NodeHasSufficientMemory/NodeHasNoDiskPressure/NodeHasSufficientPID/NodeNotReady event and "Node became not ready" condition cycle above repeats 6 times, identical except for timestamps, I0216 13:04:51.845247 through I0216 13:04:52.361257 ...]
Feb 16 13:04:52 crc kubenswrapper[4816]: I0216 13:04:52.395727 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 19:06:54.927197348 +0000 UTC
Feb 16 13:04:52 crc kubenswrapper[4816]: I0216 13:04:52.398081 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts"
Feb 16 13:04:52 crc kubenswrapper[4816]: E0216 13:04:52.398227 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11"
[... node-status cycle repeats 10 times, identical except for timestamps, I0216 13:04:52.463289 through I0216 13:04:53.391437 ...]
Feb 16 13:04:53 crc kubenswrapper[4816]: I0216 13:04:53.395856 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 16:23:12.920273087 +0000 UTC
Feb 16 13:04:53 crc kubenswrapper[4816]: I0216 13:04:53.398299 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:04:53 crc kubenswrapper[4816]: I0216 13:04:53.398317 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:04:53 crc kubenswrapper[4816]: I0216 13:04:53.398502 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:04:53 crc kubenswrapper[4816]: E0216 13:04:53.398774 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 16 13:04:53 crc kubenswrapper[4816]: E0216 13:04:53.398877 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 16 13:04:53 crc kubenswrapper[4816]: E0216 13:04:53.399015 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... node-status cycle repeats 9 times, identical except for timestamps, I0216 13:04:53.494969 through I0216 13:04:54.320437 ...]
Feb 16 13:04:54 crc kubenswrapper[4816]: I0216 13:04:54.396720 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 17:35:51.65250954 +0000 UTC
Feb 16 13:04:54 crc kubenswrapper[4816]: I0216 13:04:54.397882 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts"
Feb 16 13:04:54 crc kubenswrapper[4816]: E0216 13:04:54.398030 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11"
[... node-status cycle repeats 10 times, identical except for timestamps, I0216 13:04:54.423791 through I0216 13:04:55.358689 ...]
Feb 16 13:04:55 crc kubenswrapper[4816]: I0216 13:04:55.397894 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 07:50:00.368894613 +0000 UTC
Feb 16 13:04:55 crc kubenswrapper[4816]: I0216 13:04:55.398048 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 16 13:04:55 crc kubenswrapper[4816]: I0216 13:04:55.398075 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:04:55 crc kubenswrapper[4816]: I0216 13:04:55.398093 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 16 13:04:55 crc kubenswrapper[4816]: E0216 13:04:55.398215 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 16 13:04:55 crc kubenswrapper[4816]: E0216 13:04:55.398387 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 16 13:04:55 crc kubenswrapper[4816]: E0216 13:04:55.398518 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... node-status cycle repeats 9 times, identical except for timestamps, I0216 13:04:55.461935 through I0216 13:04:56.290847 ...]
Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.393888 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.394316 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.394460 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.394648 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.394868 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:56Z","lastTransitionTime":"2026-02-16T13:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.398202 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 20:53:29.693167363 +0000 UTC Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.398220 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:56 crc kubenswrapper[4816]: E0216 13:04:56.398726 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.398973 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:04:56 crc kubenswrapper[4816]: E0216 13:04:56.399251 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.498034 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.498109 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.498128 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.498156 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.498176 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:56Z","lastTransitionTime":"2026-02-16T13:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.601408 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.601814 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.601845 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.601875 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.601897 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:56Z","lastTransitionTime":"2026-02-16T13:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.705002 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.705047 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.705063 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.705086 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.705103 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:56Z","lastTransitionTime":"2026-02-16T13:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.808026 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.808090 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.808109 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.808132 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.808149 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:56Z","lastTransitionTime":"2026-02-16T13:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.910605 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.910687 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.910706 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.910731 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:56 crc kubenswrapper[4816]: I0216 13:04:56.910747 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:56Z","lastTransitionTime":"2026-02-16T13:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.013102 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.013136 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.013144 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.013157 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.013167 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.115839 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.115880 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.115892 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.115908 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.115919 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.218367 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.218403 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.218413 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.218428 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.218441 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.325860 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.325912 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.325929 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.325949 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.325964 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.397756 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.397770 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.398096 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:57 crc kubenswrapper[4816]: E0216 13:04:57.397967 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:57 crc kubenswrapper[4816]: E0216 13:04:57.398203 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:57 crc kubenswrapper[4816]: E0216 13:04:57.398278 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.398309 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 14:50:41.630958329 +0000 UTC Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.428597 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.428649 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.428693 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.428718 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.428736 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.531510 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.531570 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.531587 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.531613 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.531629 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.635225 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.635289 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.635310 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.635339 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.635361 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.738811 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.738878 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.738891 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.738910 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.738925 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.842023 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.842108 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.842131 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.842158 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.842177 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.945852 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.945928 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.945954 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.945988 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:57 crc kubenswrapper[4816]: I0216 13:04:57.946009 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:57Z","lastTransitionTime":"2026-02-16T13:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.049117 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.049181 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.049205 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.049234 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.049256 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.151697 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.151756 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.151772 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.151794 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.151808 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.255330 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.255400 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.255417 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.255444 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.255461 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.359180 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.359233 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.359250 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.359275 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.359292 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.398319 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:04:58 crc kubenswrapper[4816]: E0216 13:04:58.398568 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.399044 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 21:36:21.80143944 +0000 UTC Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.462082 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.462135 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.462154 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.462177 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.462194 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.565268 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.565316 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.565327 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.565348 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.565360 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.667623 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.667715 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.667728 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.667770 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.667783 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.771307 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.771375 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.771395 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.771419 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.771436 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.875010 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.875059 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.875074 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.875091 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.875106 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.977737 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.977791 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.977808 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.977830 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:58 crc kubenswrapper[4816]: I0216 13:04:58.977846 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:58Z","lastTransitionTime":"2026-02-16T13:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.080719 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.080767 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.080777 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.080793 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.080805 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.183420 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.183480 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.183497 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.183522 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.183539 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.286631 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.286763 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.286801 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.286835 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.286856 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.389388 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.389444 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.389452 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.389466 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.389477 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.397862 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.397949 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:04:59 crc kubenswrapper[4816]: E0216 13:04:59.397975 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.398117 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:04:59 crc kubenswrapper[4816]: E0216 13:04:59.398252 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:04:59 crc kubenswrapper[4816]: E0216 13:04:59.398396 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.399272 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 06:49:44.737963819 +0000 UTC Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.491449 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.491510 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.491529 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.491552 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.491569 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.594264 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.594326 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.594342 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.594364 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.594381 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.692682 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.692718 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.692727 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.692743 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.692754 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.721547 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.721599 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.721616 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.721639 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.721673 4816 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-16T13:04:59Z","lastTransitionTime":"2026-02-16T13:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.754210 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4"] Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.754691 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.756807 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.757573 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.757889 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.758150 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.800086 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.800066021 podStartE2EDuration="1m18.800066021s" podCreationTimestamp="2026-02-16 13:03:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.782877553 +0000 UTC m=+99.109591331" watchObservedRunningTime="2026-02-16 13:04:59.800066021 +0000 UTC m=+99.126779759" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.800630 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=17.800625226 podStartE2EDuration="17.800625226s" podCreationTimestamp="2026-02-16 13:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.80003948 +0000 UTC m=+99.126753248" watchObservedRunningTime="2026-02-16 13:04:59.800625226 +0000 UTC m=+99.127338964" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.835521 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=78.835497127 podStartE2EDuration="1m18.835497127s" podCreationTimestamp="2026-02-16 13:03:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.834595761 +0000 UTC m=+99.161309529" watchObservedRunningTime="2026-02-16 13:04:59.835497127 +0000 UTC m=+99.162210895" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.842846 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9276d07f-dac2-4673-879c-a5a888f65d8a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.842892 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9276d07f-dac2-4673-879c-a5a888f65d8a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.842922 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9276d07f-dac2-4673-879c-a5a888f65d8a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.842958 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9276d07f-dac2-4673-879c-a5a888f65d8a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.843058 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9276d07f-dac2-4673-879c-a5a888f65d8a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.884045 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-nlv2n" podStartSLOduration=74.884025919 podStartE2EDuration="1m14.884025919s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.871134567 +0000 UTC m=+99.197848315" watchObservedRunningTime="2026-02-16 13:04:59.884025919 +0000 UTC m=+99.210739637" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.904877 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=50.904858096 podStartE2EDuration="50.904858096s" podCreationTimestamp="2026-02-16 13:04:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.884466671 +0000 UTC m=+99.211180469" watchObservedRunningTime="2026-02-16 13:04:59.904858096 +0000 UTC m=+99.231571824" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.926398 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podStartSLOduration=74.926356692 podStartE2EDuration="1m14.926356692s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.92553402 +0000 UTC m=+99.252247768" watchObservedRunningTime="2026-02-16 13:04:59.926356692 +0000 UTC m=+99.253070460" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.943689 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9276d07f-dac2-4673-879c-a5a888f65d8a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.943731 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9276d07f-dac2-4673-879c-a5a888f65d8a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.943792 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9276d07f-dac2-4673-879c-a5a888f65d8a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.943812 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9276d07f-dac2-4673-879c-a5a888f65d8a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.943833 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9276d07f-dac2-4673-879c-a5a888f65d8a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.943895 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9276d07f-dac2-4673-879c-a5a888f65d8a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.944396 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9276d07f-dac2-4673-879c-a5a888f65d8a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.944787 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9276d07f-dac2-4673-879c-a5a888f65d8a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.950840 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9276d07f-dac2-4673-879c-a5a888f65d8a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: 
\"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.968295 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9276d07f-dac2-4673-879c-a5a888f65d8a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9jmt4\" (UID: \"9276d07f-dac2-4673-879c-a5a888f65d8a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.980491 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-flb2w" podStartSLOduration=74.980467476 podStartE2EDuration="1m14.980467476s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.970441924 +0000 UTC m=+99.297155662" watchObservedRunningTime="2026-02-16 13:04:59.980467476 +0000 UTC m=+99.307181204" Feb 16 13:04:59 crc kubenswrapper[4816]: I0216 13:04:59.994814 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.994787346 podStartE2EDuration="1m18.994787346s" podCreationTimestamp="2026-02-16 13:03:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:04:59.994066917 +0000 UTC m=+99.320780645" watchObservedRunningTime="2026-02-16 13:04:59.994787346 +0000 UTC m=+99.321501104" Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.044148 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-r5z5t" podStartSLOduration=75.044130391 podStartE2EDuration="1m15.044130391s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:00.043740811 +0000 UTC m=+99.370454549" watchObservedRunningTime="2026-02-16 13:05:00.044130391 +0000 UTC m=+99.370844119" Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.064442 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-69xcw" podStartSLOduration=75.064421534 podStartE2EDuration="1m15.064421534s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:00.063999433 +0000 UTC m=+99.390713161" watchObservedRunningTime="2026-02-16 13:05:00.064421534 +0000 UTC m=+99.391135272" Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.075277 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.397838 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:00 crc kubenswrapper[4816]: E0216 13:05:00.398301 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.399429 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 19:19:30.361730416 +0000 UTC Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.399523 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.407565 4816 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.914627 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" event={"ID":"9276d07f-dac2-4673-879c-a5a888f65d8a","Type":"ContainerStarted","Data":"bd720e77552ed676d0e6f1e921c35a5656057050ffe1244ce3b495cfc37255c6"} Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.914696 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" event={"ID":"9276d07f-dac2-4673-879c-a5a888f65d8a","Type":"ContainerStarted","Data":"4847f0edb3763903b25e82f13b4185df6fe58e3b3ec67cdb05a584472dae8e18"} Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.931912 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9jmt4" podStartSLOduration=75.931877281 podStartE2EDuration="1m15.931877281s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:00.931864641 +0000 UTC m=+100.258578369" watchObservedRunningTime="2026-02-16 13:05:00.931877281 +0000 UTC m=+100.258591049" Feb 16 13:05:00 crc kubenswrapper[4816]: I0216 13:05:00.933702 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qfsj9" podStartSLOduration=74.93364837 podStartE2EDuration="1m14.93364837s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:00.150420338 +0000 UTC m=+99.477134066" watchObservedRunningTime="2026-02-16 13:05:00.93364837 +0000 UTC m=+100.260362188" Feb 16 13:05:01 crc kubenswrapper[4816]: I0216 13:05:01.398150 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:01 crc kubenswrapper[4816]: I0216 13:05:01.398194 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:01 crc kubenswrapper[4816]: E0216 13:05:01.399860 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:01 crc kubenswrapper[4816]: E0216 13:05:01.400018 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:01 crc kubenswrapper[4816]: I0216 13:05:01.400170 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:01 crc kubenswrapper[4816]: E0216 13:05:01.400407 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:02 crc kubenswrapper[4816]: I0216 13:05:02.397897 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:02 crc kubenswrapper[4816]: E0216 13:05:02.398092 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:03 crc kubenswrapper[4816]: I0216 13:05:03.397943 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:03 crc kubenswrapper[4816]: I0216 13:05:03.397955 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:03 crc kubenswrapper[4816]: I0216 13:05:03.398016 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:03 crc kubenswrapper[4816]: E0216 13:05:03.399542 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:03 crc kubenswrapper[4816]: E0216 13:05:03.399643 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:03 crc kubenswrapper[4816]: E0216 13:05:03.399970 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:04 crc kubenswrapper[4816]: I0216 13:05:04.201224 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:04 crc kubenswrapper[4816]: E0216 13:05:04.201404 4816 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:05:04 crc kubenswrapper[4816]: E0216 13:05:04.201465 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs podName:108200fc-f37f-4d80-bd46-314679989e11 nodeName:}" failed. No retries permitted until 2026-02-16 13:06:08.201450014 +0000 UTC m=+167.528163742 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs") pod "network-metrics-daemon-gfwts" (UID: "108200fc-f37f-4d80-bd46-314679989e11") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 16 13:05:04 crc kubenswrapper[4816]: I0216 13:05:04.397984 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:04 crc kubenswrapper[4816]: E0216 13:05:04.398109 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:05 crc kubenswrapper[4816]: I0216 13:05:05.398731 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:05 crc kubenswrapper[4816]: I0216 13:05:05.398813 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:05 crc kubenswrapper[4816]: I0216 13:05:05.398747 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:05 crc kubenswrapper[4816]: E0216 13:05:05.398951 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:05 crc kubenswrapper[4816]: E0216 13:05:05.399138 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:05 crc kubenswrapper[4816]: E0216 13:05:05.399263 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:06 crc kubenswrapper[4816]: I0216 13:05:06.398143 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:06 crc kubenswrapper[4816]: E0216 13:05:06.398321 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:07 crc kubenswrapper[4816]: I0216 13:05:07.398638 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:07 crc kubenswrapper[4816]: I0216 13:05:07.398832 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:07 crc kubenswrapper[4816]: E0216 13:05:07.398859 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:07 crc kubenswrapper[4816]: I0216 13:05:07.399866 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:05:07 crc kubenswrapper[4816]: E0216 13:05:07.400139 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" Feb 16 13:05:07 crc kubenswrapper[4816]: I0216 13:05:07.400345 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:07 crc kubenswrapper[4816]: E0216 13:05:07.400440 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:07 crc kubenswrapper[4816]: E0216 13:05:07.400698 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:08 crc kubenswrapper[4816]: I0216 13:05:08.398649 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:08 crc kubenswrapper[4816]: E0216 13:05:08.399415 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:09 crc kubenswrapper[4816]: I0216 13:05:09.398473 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:09 crc kubenswrapper[4816]: I0216 13:05:09.398636 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:09 crc kubenswrapper[4816]: I0216 13:05:09.398794 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:09 crc kubenswrapper[4816]: E0216 13:05:09.398794 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:09 crc kubenswrapper[4816]: E0216 13:05:09.398934 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:09 crc kubenswrapper[4816]: E0216 13:05:09.399048 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:10 crc kubenswrapper[4816]: I0216 13:05:10.398530 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:10 crc kubenswrapper[4816]: E0216 13:05:10.398853 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:11 crc kubenswrapper[4816]: I0216 13:05:11.398093 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:11 crc kubenswrapper[4816]: I0216 13:05:11.398232 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:11 crc kubenswrapper[4816]: E0216 13:05:11.400485 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:11 crc kubenswrapper[4816]: I0216 13:05:11.400835 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:11 crc kubenswrapper[4816]: E0216 13:05:11.401308 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:11 crc kubenswrapper[4816]: E0216 13:05:11.401383 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:12 crc kubenswrapper[4816]: I0216 13:05:12.397869 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:12 crc kubenswrapper[4816]: E0216 13:05:12.398023 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:13 crc kubenswrapper[4816]: I0216 13:05:13.398466 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:13 crc kubenswrapper[4816]: I0216 13:05:13.398557 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:13 crc kubenswrapper[4816]: I0216 13:05:13.398646 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:13 crc kubenswrapper[4816]: E0216 13:05:13.398871 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:13 crc kubenswrapper[4816]: E0216 13:05:13.399067 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:13 crc kubenswrapper[4816]: E0216 13:05:13.399271 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:14 crc kubenswrapper[4816]: I0216 13:05:14.397851 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:14 crc kubenswrapper[4816]: E0216 13:05:14.398236 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:15 crc kubenswrapper[4816]: I0216 13:05:15.398708 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:15 crc kubenswrapper[4816]: I0216 13:05:15.398650 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:15 crc kubenswrapper[4816]: E0216 13:05:15.399318 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:15 crc kubenswrapper[4816]: E0216 13:05:15.399464 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:15 crc kubenswrapper[4816]: I0216 13:05:15.398760 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:15 crc kubenswrapper[4816]: E0216 13:05:15.399569 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:16 crc kubenswrapper[4816]: I0216 13:05:16.397631 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:16 crc kubenswrapper[4816]: E0216 13:05:16.398317 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:17 crc kubenswrapper[4816]: I0216 13:05:17.399061 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:17 crc kubenswrapper[4816]: I0216 13:05:17.399130 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:17 crc kubenswrapper[4816]: I0216 13:05:17.399155 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:17 crc kubenswrapper[4816]: E0216 13:05:17.399330 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:17 crc kubenswrapper[4816]: E0216 13:05:17.399429 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:17 crc kubenswrapper[4816]: E0216 13:05:17.399491 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:18 crc kubenswrapper[4816]: I0216 13:05:18.398047 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:18 crc kubenswrapper[4816]: E0216 13:05:18.398263 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.398451 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.398531 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.398628 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:19 crc kubenswrapper[4816]: E0216 13:05:19.398846 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:19 crc kubenswrapper[4816]: E0216 13:05:19.398966 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:19 crc kubenswrapper[4816]: E0216 13:05:19.399178 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.976842 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/1.log" Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.977173 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/0.log" Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.977203 4816 generic.go:334] "Generic (PLEG): container finished" podID="2a58f937-7095-4c3c-b401-3a68ae936b86" containerID="a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd" exitCode=1 Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.977231 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerDied","Data":"a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd"} Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.977265 4816 scope.go:117] "RemoveContainer" containerID="06e984c934afa23058e9443e11a0c7045d4c62713593789983f3a0b44c24f191" Feb 16 13:05:19 crc kubenswrapper[4816]: I0216 13:05:19.978260 4816 scope.go:117] "RemoveContainer" containerID="a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd" Feb 16 13:05:19 crc kubenswrapper[4816]: E0216 13:05:19.978593 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-69xcw_openshift-multus(2a58f937-7095-4c3c-b401-3a68ae936b86)\"" pod="openshift-multus/multus-69xcw" podUID="2a58f937-7095-4c3c-b401-3a68ae936b86" Feb 16 13:05:20 crc kubenswrapper[4816]: I0216 13:05:20.398460 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:20 crc kubenswrapper[4816]: E0216 13:05:20.398623 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:20 crc kubenswrapper[4816]: I0216 13:05:20.981872 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/1.log" Feb 16 13:05:21 crc kubenswrapper[4816]: E0216 13:05:21.366847 4816 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Feb 16 13:05:21 crc kubenswrapper[4816]: I0216 13:05:21.398266 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:21 crc kubenswrapper[4816]: I0216 13:05:21.398421 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:21 crc kubenswrapper[4816]: I0216 13:05:21.399632 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:21 crc kubenswrapper[4816]: E0216 13:05:21.399789 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:21 crc kubenswrapper[4816]: E0216 13:05:21.399832 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:21 crc kubenswrapper[4816]: E0216 13:05:21.399920 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:21 crc kubenswrapper[4816]: I0216 13:05:21.400522 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:05:21 crc kubenswrapper[4816]: E0216 13:05:21.400691 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-s2hth_openshift-ovn-kubernetes(0d1c53ef-b268-431b-bdb8-49f45d0715f8)\"" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" Feb 16 13:05:21 crc kubenswrapper[4816]: E0216 13:05:21.505512 4816 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 16 13:05:22 crc kubenswrapper[4816]: I0216 13:05:22.398374 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:22 crc kubenswrapper[4816]: E0216 13:05:22.398539 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:23 crc kubenswrapper[4816]: I0216 13:05:23.397937 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:23 crc kubenswrapper[4816]: I0216 13:05:23.398034 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:23 crc kubenswrapper[4816]: E0216 13:05:23.398136 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:23 crc kubenswrapper[4816]: E0216 13:05:23.398210 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:23 crc kubenswrapper[4816]: I0216 13:05:23.398546 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:23 crc kubenswrapper[4816]: E0216 13:05:23.398859 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:24 crc kubenswrapper[4816]: I0216 13:05:24.398362 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:24 crc kubenswrapper[4816]: E0216 13:05:24.398490 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:25 crc kubenswrapper[4816]: I0216 13:05:25.397854 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:25 crc kubenswrapper[4816]: I0216 13:05:25.397922 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:25 crc kubenswrapper[4816]: I0216 13:05:25.397891 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:25 crc kubenswrapper[4816]: E0216 13:05:25.398077 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:25 crc kubenswrapper[4816]: E0216 13:05:25.398224 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:25 crc kubenswrapper[4816]: E0216 13:05:25.398285 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:26 crc kubenswrapper[4816]: I0216 13:05:26.397703 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:26 crc kubenswrapper[4816]: E0216 13:05:26.397918 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:26 crc kubenswrapper[4816]: E0216 13:05:26.507488 4816 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 16 13:05:27 crc kubenswrapper[4816]: I0216 13:05:27.397704 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:27 crc kubenswrapper[4816]: I0216 13:05:27.397748 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:27 crc kubenswrapper[4816]: I0216 13:05:27.397784 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:27 crc kubenswrapper[4816]: E0216 13:05:27.397907 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:27 crc kubenswrapper[4816]: E0216 13:05:27.398019 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:27 crc kubenswrapper[4816]: E0216 13:05:27.398266 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:28 crc kubenswrapper[4816]: I0216 13:05:28.398121 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:28 crc kubenswrapper[4816]: E0216 13:05:28.398292 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:29 crc kubenswrapper[4816]: I0216 13:05:29.398392 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:29 crc kubenswrapper[4816]: I0216 13:05:29.398504 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:29 crc kubenswrapper[4816]: E0216 13:05:29.398588 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:29 crc kubenswrapper[4816]: E0216 13:05:29.398726 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:29 crc kubenswrapper[4816]: I0216 13:05:29.398813 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:29 crc kubenswrapper[4816]: E0216 13:05:29.398943 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:30 crc kubenswrapper[4816]: I0216 13:05:30.397756 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:30 crc kubenswrapper[4816]: E0216 13:05:30.398162 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:31 crc kubenswrapper[4816]: I0216 13:05:31.398169 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:31 crc kubenswrapper[4816]: I0216 13:05:31.398238 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:31 crc kubenswrapper[4816]: I0216 13:05:31.398221 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:31 crc kubenswrapper[4816]: E0216 13:05:31.399082 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:31 crc kubenswrapper[4816]: E0216 13:05:31.399195 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:31 crc kubenswrapper[4816]: E0216 13:05:31.399320 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:31 crc kubenswrapper[4816]: E0216 13:05:31.508053 4816 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 16 13:05:32 crc kubenswrapper[4816]: I0216 13:05:32.398417 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:32 crc kubenswrapper[4816]: E0216 13:05:32.398592 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:33 crc kubenswrapper[4816]: I0216 13:05:33.397710 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:33 crc kubenswrapper[4816]: E0216 13:05:33.397857 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:33 crc kubenswrapper[4816]: I0216 13:05:33.397940 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:33 crc kubenswrapper[4816]: I0216 13:05:33.398095 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:33 crc kubenswrapper[4816]: E0216 13:05:33.398488 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:33 crc kubenswrapper[4816]: I0216 13:05:33.398565 4816 scope.go:117] "RemoveContainer" containerID="a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd" Feb 16 13:05:33 crc kubenswrapper[4816]: E0216 13:05:33.398752 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:33 crc kubenswrapper[4816]: I0216 13:05:33.398829 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.025818 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/3.log" Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.027811 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerStarted","Data":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.028933 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.030983 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/1.log" Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.031042 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerStarted","Data":"d5039fad603c0433bc27bccdf795af00d118b8c3d4eb02751a4cd317d59167a6"} Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.062628 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podStartSLOduration=109.062606326 podStartE2EDuration="1m49.062606326s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:34.059216851 +0000 UTC m=+133.385930579" watchObservedRunningTime="2026-02-16 13:05:34.062606326 +0000 UTC m=+133.389320094" Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.118890 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gfwts"] Feb 16 13:05:34 crc kubenswrapper[4816]: I0216 13:05:34.118984 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:34 crc kubenswrapper[4816]: E0216 13:05:34.119067 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:35 crc kubenswrapper[4816]: I0216 13:05:35.398075 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:35 crc kubenswrapper[4816]: E0216 13:05:35.398625 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 16 13:05:35 crc kubenswrapper[4816]: I0216 13:05:35.398214 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:35 crc kubenswrapper[4816]: I0216 13:05:35.398317 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:35 crc kubenswrapper[4816]: E0216 13:05:35.399003 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 16 13:05:35 crc kubenswrapper[4816]: I0216 13:05:35.398126 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:35 crc kubenswrapper[4816]: E0216 13:05:35.399177 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gfwts" podUID="108200fc-f37f-4d80-bd46-314679989e11" Feb 16 13:05:35 crc kubenswrapper[4816]: E0216 13:05:35.399290 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.398350 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.398414 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.398425 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.398565 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.401593 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.401858 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.402328 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.403093 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.403400 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 16 13:05:37 crc kubenswrapper[4816]: I0216 13:05:37.404889 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.710198 4816 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.776587 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l7q8r"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.779578 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9mmht"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.780282 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.780454 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.780802 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.782144 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.782876 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jjn42"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.783408 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.783455 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.783492 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.783555 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.784395 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.792747 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.793914 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.797361 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-brmt2"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.797648 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.799248 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.800353 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.800600 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.802581 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.808077 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-c6kz6"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.808755 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-c6kz6" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.810727 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-f2dr7"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.811292 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.812215 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.812631 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.812817 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.813444 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h72gw"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.814011 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.817731 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.818511 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.819539 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vrcl2"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.820649 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.820649 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.820921 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.821253 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.821275 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.821305 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.821463 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.821468 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.821774 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.822021 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 16 13:05:40 
crc kubenswrapper[4816]: I0216 13:05:40.822027 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.822210 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.822289 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.822394 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.822534 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.822696 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.822722 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.823091 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.823147 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.823225 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.823253 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.823611 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.823795 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.823834 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.824056 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.824271 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.824327 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.824400 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l9v8l"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.824599 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.824804 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.824872 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.825069 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.825233 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.825683 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.825798 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.825831 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.826257 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.826856 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.827256 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.827335 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.827735 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.827896 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.828291 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.828414 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.829252 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.829585 4816 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"oauth-serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.829890 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.829994 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qjdjp"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.830021 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.830286 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.830475 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.831620 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.832180 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.832415 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.832707 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.832921 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.833104 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.834619 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835126 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835134 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835863 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835322 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835375 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835456 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835481 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835503 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835522 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835534 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.835564 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.842731 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.848271 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.848629 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.851130 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.852827 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.852921 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.853061 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.854364 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 16 13:05:40 crc 
kubenswrapper[4816]: I0216 13:05:40.877450 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.877743 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.880086 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.883096 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.884137 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jhhtj"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.884691 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.884976 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.885059 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.886320 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.889976 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.890260 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.891719 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.891852 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.892102 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.892125 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.892247 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.892360 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.892474 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.892609 4816 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.893071 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.895339 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.896569 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.896848 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.897087 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.898402 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.899212 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.901583 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.902157 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.904830 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.905238 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.905879 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.909191 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.909592 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.910239 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-lwmcd"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.921780 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.925059 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.926320 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.926874 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.927902 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.928522 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.928637 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.935155 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.935960 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.936982 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.937129 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.941668 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.942106 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jjn42"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.943130 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.944071 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.944755 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.945291 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.953430 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.954049 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.955771 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.956196 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.958959 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rtjrp"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.959666 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.960250 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.962281 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.962355 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.962743 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.962931 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.963085 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.963281 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.963704 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.964274 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.965102 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-8gs8z"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.965798 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.967488 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-966kl"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.967909 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.968928 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.969300 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.970721 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mcstk"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.971146 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.972144 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.972498 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.973901 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.975453 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h72gw"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.975568 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9mmht"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.977185 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.978963 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l7q8r"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.980304 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-f2dr7"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.981368 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-bjtq8"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.982100 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.982893 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-c6kz6"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.983536 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.984193 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.986735 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.987891 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.989351 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.991037 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jhhtj"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.993235 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.996344 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x"] Feb 16 13:05:40 crc kubenswrapper[4816]: I0216 13:05:40.997548 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qjdjp"] Feb 16 13:05:40 
crc kubenswrapper[4816]: I0216 13:05:40.999911 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l9v8l"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.001691 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.003903 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.006431 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.008997 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.010396 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mcstk"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.012772 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vrcl2"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.014248 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.015404 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.016346 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bjtq8"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.017464 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.018579 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.020100 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.021358 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-966kl"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.022566 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-brmt2"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.023712 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p66fg"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.025163 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.025172 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-p9dwz"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.026018 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.027572 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.029000 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rtjrp"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.031731 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.032776 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-8gs8z"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.033905 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.035435 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-p9dwz"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.036854 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p66fg"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.038080 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.039128 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-ltptv"] Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.039764 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.043056 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.062981 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.082600 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.103436 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.123730 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.143917 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.163404 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.183015 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.203929 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.222776 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.244019 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.264042 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.303238 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.324169 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.344099 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.363346 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.384322 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.409636 4816 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.423767 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.443193 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.464607 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.483039 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.504758 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.523186 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.543111 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.563762 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.583593 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.603873 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.624040 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.644333 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.663864 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.684846 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.704118 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.731037 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.743839 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.763310 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.784184 4816 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.803799 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.823784 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.844263 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.863405 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.884728 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.904528 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.924136 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.943908 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.962212 4816 request.go:700] Waited for 1.005808025s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dcatalog-operator-serving-cert&limit=500&resourceVersion=0 Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.964751 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 16 13:05:41 crc kubenswrapper[4816]: I0216 13:05:41.984021 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.003684 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.023533 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.043577 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.064086 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.084075 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.104629 4816 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.123997 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.143526 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.162852 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.189870 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.203145 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.223081 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.244931 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.264129 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.283527 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.304271 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.323623 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.343098 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.363932 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.394326 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.404343 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.424032 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.444215 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.462848 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: 
I0216 13:05:42.483913 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.502981 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.523974 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.543195 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.563863 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.583623 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.603713 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.624776 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.643130 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.683400 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.707395 4816 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.723749 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.728544 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxvr5\" (UniqueName: \"kubernetes.io/projected/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-kube-api-access-qxvr5\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.728793 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.728917 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvdz6\" (UniqueName: \"kubernetes.io/projected/ef1e487f-08d8-4ba7-805a-68cf93ce434e-kube-api-access-bvdz6\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " 
pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729007 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2ddt\" (UniqueName: \"kubernetes.io/projected/89f428fd-8717-4819-81d8-ee04443b38a5-kube-api-access-s2ddt\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729088 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22jc8\" (UniqueName: \"kubernetes.io/projected/a12efaa8-e8bf-47eb-b019-d5e1fe136221-kube-api-access-22jc8\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729156 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729205 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-serving-cert\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729269 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8dst\" (UniqueName: \"kubernetes.io/projected/ada6fd48-493b-41ec-aee5-dd526d65dcdb-kube-api-access-n8dst\") pod \"dns-operator-744455d44c-l9v8l\" (UID: \"ada6fd48-493b-41ec-aee5-dd526d65dcdb\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729344 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729420 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729483 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-trusted-ca-bundle\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " 
pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729553 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0c171de2-9344-4919-986f-e6544cb7cf0a-auth-proxy-config\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729646 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-etcd-client\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729793 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/28235c00-42f1-4935-9b42-c055518c28d3-images\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729860 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-trusted-ca\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729935 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-service-ca-bundle\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.729988 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-oauth-serving-cert\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730052 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730097 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/39dc10dd-2280-470a-b50e-272b7d1b705f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730131 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c171de2-9344-4919-986f-e6544cb7cf0a-config\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730164 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a12efaa8-e8bf-47eb-b019-d5e1fe136221-audit-dir\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730227 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-client-ca\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730322 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-tls\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730443 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-certificates\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730494 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730523 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730546 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1e487f-08d8-4ba7-805a-68cf93ce434e-config\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730570 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730596 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-bound-sa-token\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730637 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730681 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-encryption-config\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730706 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730730 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2tll\" (UniqueName: \"kubernetes.io/projected/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-kube-api-access-d2tll\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730750 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxqj8\" (UniqueName: \"kubernetes.io/projected/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-kube-api-access-xxqj8\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730770 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730822 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-etcd-client\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730841 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-policies\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730860 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef1e487f-08d8-4ba7-805a-68cf93ce434e-trusted-ca\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730882 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-etcd-serving-ca\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730905 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-config\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730919 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28235c00-42f1-4935-9b42-c055518c28d3-config\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730936 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd522\" (UniqueName: \"kubernetes.io/projected/28235c00-42f1-4935-9b42-c055518c28d3-kube-api-access-jd522\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730964 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730983 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-serving-cert\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.730998 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a12efaa8-e8bf-47eb-b019-d5e1fe136221-node-pullsecrets\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731016 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32f87458-d8fa-446e-ab8e-2b349e1152ad-serving-cert\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731039 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/39dc10dd-2280-470a-b50e-272b7d1b705f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731058 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbrdg\" (UniqueName: \"kubernetes.io/projected/0c171de2-9344-4919-986f-e6544cb7cf0a-kube-api-access-tbrdg\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731074 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-config\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731091 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-config\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731107 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n529c\" (UniqueName: \"kubernetes.io/projected/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-kube-api-access-n529c\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731123 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-audit-dir\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731139 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731172 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef1e487f-08d8-4ba7-805a-68cf93ce434e-serving-cert\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731189 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5fss\" (UniqueName: \"kubernetes.io/projected/1cae9b2f-0c46-4058-8b87-8d8cf933246c-kube-api-access-k5fss\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731210 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-config\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731224 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e4461b7c-5e7f-4ac9-bf37-2510584b4eb9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-t9xz7\" (UID: \"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731239 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-audit\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731258 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv6s7\" (UniqueName: \"kubernetes.io/projected/e4461b7c-5e7f-4ac9-bf37-2510584b4eb9-kube-api-access-fv6s7\") pod \"cluster-samples-operator-665b6dd947-t9xz7\" (UID: \"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731273 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-serving-cert\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731295 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/32f87458-d8fa-446e-ab8e-2b349e1152ad-available-featuregates\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731338 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-service-ca\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731357 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-serving-cert\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731372 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jjcp\" (UniqueName: \"kubernetes.io/projected/0196136d-4ea7-4015-97d2-c885db51c66f-kube-api-access-6jjcp\") pod \"downloads-7954f5f757-c6kz6\" (UID: \"0196136d-4ea7-4015-97d2-c885db51c66f\") " pod="openshift-console/downloads-7954f5f757-c6kz6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731389 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731404 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0c171de2-9344-4919-986f-e6544cb7cf0a-machine-approver-tls\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731420 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d95b2\" (UniqueName: \"kubernetes.io/projected/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-kube-api-access-d95b2\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731448 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-serving-cert\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731462 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-image-import-ca\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731475 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cae9b2f-0c46-4058-8b87-8d8cf933246c-serving-cert\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731492 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-audit-policies\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731508 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731521 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-dir\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731534 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-encryption-config\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731551 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-client-ca\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731564 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: 
\"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731577 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731591 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731607 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr4hx\" (UniqueName: \"kubernetes.io/projected/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-kube-api-access-pr4hx\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731622 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731640 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731678 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwtnj\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-kube-api-access-kwtnj\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731694 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-console-config\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731719 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-etcd-serving-ca\") pod 
\"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731734 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spkpm\" (UniqueName: \"kubernetes.io/projected/32f87458-d8fa-446e-ab8e-2b349e1152ad-kube-api-access-spkpm\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731748 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-oauth-config\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731764 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/28235c00-42f1-4935-9b42-c055518c28d3-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.731781 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ada6fd48-493b-41ec-aee5-dd526d65dcdb-metrics-tls\") pod \"dns-operator-744455d44c-l9v8l\" (UID: \"ada6fd48-493b-41ec-aee5-dd526d65dcdb\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:42 crc kubenswrapper[4816]: E0216 13:05:42.735851 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.232511617 +0000 UTC m=+142.559225345 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.743246 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.763213 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.783067 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.804347 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.823161 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.832546 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:42 crc kubenswrapper[4816]: E0216 13:05:42.832793 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.332761134 +0000 UTC m=+142.659474862 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.832849 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-config\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.832896 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e34f8aa9-54c5-4964-a481-ff6745ec54d8-secret-volume\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.832920 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/75810b59-18d3-400b-ab38-25d3dcf7cea5-tmpfs\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.832952 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-audit-dir\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.832980 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833005 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef1e487f-08d8-4ba7-805a-68cf93ce434e-serving-cert\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833028 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5fss\" (UniqueName: \"kubernetes.io/projected/1cae9b2f-0c46-4058-8b87-8d8cf933246c-kube-api-access-k5fss\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833038 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-audit-dir\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833057 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e4461b7c-5e7f-4ac9-bf37-2510584b4eb9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-t9xz7\" (UID: \"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833081 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-audit\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833107 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f3bfee-47a2-4347-872f-b4da6aeb68e5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833133 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e27acd41-ceb3-439f-b3f7-6ad547609158-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833157 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3612ec61-6d09-4cf6-abc7-aa0258e232ea-service-ca-bundle\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833195 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv6s7\" (UniqueName: \"kubernetes.io/projected/e4461b7c-5e7f-4ac9-bf37-2510584b4eb9-kube-api-access-fv6s7\") pod \"cluster-samples-operator-665b6dd947-t9xz7\" (UID: \"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833220 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833242 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-default-certificate\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833278 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/32f87458-d8fa-446e-ab8e-2b349e1152ad-available-featuregates\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833303 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-service-ca\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833327 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-serving-cert\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833346 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jjcp\" (UniqueName: \"kubernetes.io/projected/0196136d-4ea7-4015-97d2-c885db51c66f-kube-api-access-6jjcp\") pod \"downloads-7954f5f757-c6kz6\" (UID: \"0196136d-4ea7-4015-97d2-c885db51c66f\") " pod="openshift-console/downloads-7954f5f757-c6kz6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833369 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833393 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0c171de2-9344-4919-986f-e6544cb7cf0a-machine-approver-tls\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833417 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znvv5\" (UniqueName: \"kubernetes.io/projected/49f3bfee-47a2-4347-872f-b4da6aeb68e5-kube-api-access-znvv5\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833438 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-auth-proxy-config\") pod 
\"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833465 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-serving-cert\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833536 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-image-import-ca\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833558 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cae9b2f-0c46-4058-8b87-8d8cf933246c-serving-cert\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833580 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f0edb6a-4994-4385-b66a-7bf8fad44d60-srv-cert\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833609 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4073598c-2443-4558-8eb3-d7a5b6a15c75-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833631 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0d109fdb-3df5-4760-88d6-7e63ceb417e6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rtjrp\" (UID: \"0d109fdb-3df5-4760-88d6-7e63ceb417e6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833682 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44e4d3ad-4a18-4b32-a575-79829e3cd784-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833709 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-client-ca\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833735 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833756 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833779 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f0edb6a-4994-4385-b66a-7bf8fad44d60-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833818 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-node-bootstrap-token\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833847 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833869 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44e4d3ad-4a18-4b32-a575-79829e3cd784-trusted-ca\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833915 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/28235c00-42f1-4935-9b42-c055518c28d3-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833942 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-metrics-certs\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " 
pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833971 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppffv\" (UniqueName: \"kubernetes.io/projected/0d109fdb-3df5-4760-88d6-7e63ceb417e6-kube-api-access-ppffv\") pod \"multus-admission-controller-857f4d67dd-rtjrp\" (UID: \"0d109fdb-3df5-4760-88d6-7e63ceb417e6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.833994 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75810b59-18d3-400b-ab38-25d3dcf7cea5-apiservice-cert\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834015 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7ae3017b-8a59-4109-83c2-191b8f54831c-signing-cabundle\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834072 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxvr5\" (UniqueName: \"kubernetes.io/projected/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-kube-api-access-qxvr5\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834134 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834158 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvdz6\" (UniqueName: \"kubernetes.io/projected/ef1e487f-08d8-4ba7-805a-68cf93ce434e-kube-api-access-bvdz6\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834183 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8vmp\" (UniqueName: \"kubernetes.io/projected/7ae3017b-8a59-4109-83c2-191b8f54831c-kube-api-access-m8vmp\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834206 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/22166fdb-00e3-43e2-9a1d-d75446964277-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-86gqx\" (UID: \"22166fdb-00e3-43e2-9a1d-d75446964277\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834231 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4073598c-2443-4558-8eb3-d7a5b6a15c75-config\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834253 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-images\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834324 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834347 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-serving-cert\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834363 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-trusted-ca-bundle\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834379 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0c171de2-9344-4919-986f-e6544cb7cf0a-auth-proxy-config\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834394 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-etcd-client\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834410 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k42s\" (UniqueName: \"kubernetes.io/projected/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-kube-api-access-6k42s\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 
13:05:42.834431 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-trusted-ca\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834469 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-service-ca-bundle\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834484 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-oauth-serving-cert\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834502 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lrtd\" (UniqueName: \"kubernetes.io/projected/9f11459f-3567-4ed0-8bf3-d55ca3507378-kube-api-access-8lrtd\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834520 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/39dc10dd-2280-470a-b50e-272b7d1b705f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834521 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-service-ca\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834538 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-client-ca\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834572 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkg5b\" (UniqueName: \"kubernetes.io/projected/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-kube-api-access-pkg5b\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834601 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-service-ca\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834630 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-tls\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834692 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-certificates\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834718 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834774 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1e487f-08d8-4ba7-805a-68cf93ce434e-config\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834798 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834826 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bct74\" (UniqueName: \"kubernetes.io/projected/b2badeb2-df5f-473e-b74a-5948ce933a4b-kube-api-access-bct74\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834851 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2badeb2-df5f-473e-b74a-5948ce933a4b-serving-cert\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834874 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e27acd41-ceb3-439f-b3f7-6ad547609158-image-registry-operator-tls\") pod 
\"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834899 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96c042d0-6736-4ff8-ae21-d07442236cec-config-volume\") pod \"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.834956 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-ca\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835044 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4d2516c-521d-4e48-a547-81c06a8cda3e-config\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835075 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-encryption-config\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835096 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436a9ea3-c8a5-4445-89bb-21133fe76d23-config\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835124 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2tll\" (UniqueName: \"kubernetes.io/projected/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-kube-api-access-d2tll\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835149 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-etcd-client\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835169 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-etcd-serving-ca\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835189 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsqrt\" (UniqueName: \"kubernetes.io/projected/e34f8aa9-54c5-4964-a481-ff6745ec54d8-kube-api-access-gsqrt\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835212 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44e4d3ad-4a18-4b32-a575-79829e3cd784-metrics-tls\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835222 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-audit\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835279 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-client-ca\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835366 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-config\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835235 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z69sc\" (UniqueName: \"kubernetes.io/projected/75810b59-18d3-400b-ab38-25d3dcf7cea5-kube-api-access-z69sc\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835446 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28235c00-42f1-4935-9b42-c055518c28d3-config\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835486 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd522\" (UniqueName: \"kubernetes.io/projected/28235c00-42f1-4935-9b42-c055518c28d3-kube-api-access-jd522\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835515 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-certs\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") 
" pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835537 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gbwq\" (UniqueName: \"kubernetes.io/projected/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-kube-api-access-7gbwq\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835545 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-client-ca\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835564 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9djzp\" (UniqueName: \"kubernetes.io/projected/96c042d0-6736-4ff8-ae21-d07442236cec-kube-api-access-9djzp\") pod \"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835592 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-serving-cert\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835702 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a12efaa8-e8bf-47eb-b019-d5e1fe136221-node-pullsecrets\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835726 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7ae3017b-8a59-4109-83c2-191b8f54831c-signing-key\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835750 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4d2516c-521d-4e48-a547-81c06a8cda3e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835778 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32f87458-d8fa-446e-ab8e-2b349e1152ad-serving-cert\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835814 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/39dc10dd-2280-470a-b50e-272b7d1b705f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835818 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835838 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-stats-auth\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835934 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbrdg\" (UniqueName: \"kubernetes.io/projected/0c171de2-9344-4919-986f-e6544cb7cf0a-kube-api-access-tbrdg\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835961 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-config\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.835988 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wvbp\" (UniqueName: \"kubernetes.io/projected/7be610ca-65bb-4abc-809f-37a005b6f491-kube-api-access-9wvbp\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836014 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n529c\" (UniqueName: \"kubernetes.io/projected/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-kube-api-access-n529c\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836037 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-profile-collector-cert\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836063 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836090 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-config\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836119 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbkkn\" (UniqueName: \"kubernetes.io/projected/436a9ea3-c8a5-4445-89bb-21133fe76d23-kube-api-access-mbkkn\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836145 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-serving-cert\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836172 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwb2v\" (UniqueName: \"kubernetes.io/projected/22166fdb-00e3-43e2-9a1d-d75446964277-kube-api-access-gwb2v\") pod \"package-server-manager-789f6589d5-86gqx\" (UID: \"22166fdb-00e3-43e2-9a1d-d75446964277\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836196 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr8s8\" (UniqueName: \"kubernetes.io/projected/7dc7b0ca-d149-48fa-acac-a7835087634b-kube-api-access-hr8s8\") pod \"ingress-canary-bjtq8\" (UID: \"7dc7b0ca-d149-48fa-acac-a7835087634b\") " pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836220 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-mountpoint-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836376 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef1e487f-08d8-4ba7-805a-68cf93ce434e-config\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836409 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96c042d0-6736-4ff8-ae21-d07442236cec-metrics-tls\") pod \"dns-default-p9dwz\" (UID: 
\"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836441 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836464 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-plugins-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836497 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d95b2\" (UniqueName: \"kubernetes.io/projected/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-kube-api-access-d95b2\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836520 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-config\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836542 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e27acd41-ceb3-439f-b3f7-6ad547609158-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836568 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfrtt\" (UniqueName: \"kubernetes.io/projected/963d9667-805f-49b3-a315-e61e0f6718bf-kube-api-access-wfrtt\") pod \"migrator-59844c95c7-vgdfk\" (UID: \"963d9667-805f-49b3-a315-e61e0f6718bf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836607 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-audit-policies\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836630 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836669 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e34f8aa9-54c5-4964-a481-ff6745ec54d8-config-volume\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836698 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836725 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-dir\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836748 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-encryption-config\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.836771 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-proxy-tls\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.838997 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.839436 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.839512 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:42 
crc kubenswrapper[4816]: I0216 13:05:42.839629 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a12efaa8-e8bf-47eb-b019-d5e1fe136221-node-pullsecrets\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.839860 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-serving-cert\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.839917 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28235c00-42f1-4935-9b42-c055518c28d3-config\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.839967 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/28235c00-42f1-4935-9b42-c055518c28d3-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.840101 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/32f87458-d8fa-446e-ab8e-2b349e1152ad-available-featuregates\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.840603 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e4461b7c-5e7f-4ac9-bf37-2510584b4eb9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-t9xz7\" (UID: \"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.841020 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-etcd-serving-ca\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.841205 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.841206 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-encryption-config\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.841314 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.841487 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0c171de2-9344-4919-986f-e6544cb7cf0a-auth-proxy-config\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.842106 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.842292 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.842483 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef1e487f-08d8-4ba7-805a-68cf93ce434e-serving-cert\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.842780 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cae9b2f-0c46-4058-8b87-8d8cf933246c-serving-cert\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.843252 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-trusted-ca-bundle\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.843944 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.844289 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-config\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.844494 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.845391 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-tls\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.845521 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-service-ca-bundle\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.845852 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32f87458-d8fa-446e-ab8e-2b349e1152ad-serving-cert\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.846087 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/39dc10dd-2280-470a-b50e-272b7d1b705f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.846320 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-config\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.846501 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-etcd-client\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.846595 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-serving-cert\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:42 crc 
kubenswrapper[4816]: I0216 13:05:42.846807 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0c171de2-9344-4919-986f-e6544cb7cf0a-machine-approver-tls\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.846946 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.846987 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847018 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr4hx\" (UniqueName: \"kubernetes.io/projected/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-kube-api-access-pr4hx\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847051 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrtrc\" (UniqueName: \"kubernetes.io/projected/3612ec61-6d09-4cf6-abc7-aa0258e232ea-kube-api-access-mrtrc\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847052 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-oauth-serving-cert\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847080 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crtjd\" (UniqueName: \"kubernetes.io/projected/4f0edb6a-4994-4385-b66a-7bf8fad44d60-kube-api-access-crtjd\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847114 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwtnj\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-kube-api-access-kwtnj\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847141 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-console-config\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847203 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5qqt\" (UniqueName: \"kubernetes.io/projected/44e4d3ad-4a18-4b32-a575-79829e3cd784-kube-api-access-v5qqt\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847231 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-registration-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847257 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847278 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spkpm\" (UniqueName: \"kubernetes.io/projected/32f87458-d8fa-446e-ab8e-2b349e1152ad-kube-api-access-spkpm\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847298 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-oauth-config\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847316 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/13129e04-c3d9-4387-bd36-b673b082d90e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bcsnm\" (UID: \"13129e04-c3d9-4387-bd36-b673b082d90e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847335 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4073598c-2443-4558-8eb3-d7a5b6a15c75-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847356 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: 
\"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-csi-data-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847387 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ada6fd48-493b-41ec-aee5-dd526d65dcdb-metrics-tls\") pod \"dns-operator-744455d44c-l9v8l\" (UID: \"ada6fd48-493b-41ec-aee5-dd526d65dcdb\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847414 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-socket-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2ddt\" (UniqueName: \"kubernetes.io/projected/89f428fd-8717-4819-81d8-ee04443b38a5-kube-api-access-s2ddt\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847459 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22jc8\" (UniqueName: \"kubernetes.io/projected/a12efaa8-e8bf-47eb-b019-d5e1fe136221-kube-api-access-22jc8\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847527 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tjhx\" (UniqueName: \"kubernetes.io/projected/e27acd41-ceb3-439f-b3f7-6ad547609158-kube-api-access-6tjhx\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847552 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f0e84263-7736-4b18-a955-7fadfb307294-proxy-tls\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847582 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8dst\" (UniqueName: \"kubernetes.io/projected/ada6fd48-493b-41ec-aee5-dd526d65dcdb-kube-api-access-n8dst\") pod \"dns-operator-744455d44c-l9v8l\" (UID: \"ada6fd48-493b-41ec-aee5-dd526d65dcdb\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847609 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847635 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847674 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/28235c00-42f1-4935-9b42-c055518c28d3-images\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847706 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4frp\" (UniqueName: \"kubernetes.io/projected/13129e04-c3d9-4387-bd36-b673b082d90e-kube-api-access-x4frp\") pod \"control-plane-machine-set-operator-78cbb6b69f-bcsnm\" (UID: \"13129e04-c3d9-4387-bd36-b673b082d90e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847732 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847757 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847784 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c171de2-9344-4919-986f-e6544cb7cf0a-config\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847808 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a12efaa8-e8bf-47eb-b019-d5e1fe136221-audit-dir\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847834 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbt9m\" (UniqueName: \"kubernetes.io/projected/f0e84263-7736-4b18-a955-7fadfb307294-kube-api-access-pbt9m\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc 
kubenswrapper[4816]: I0216 13:05:42.847860 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847885 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-bound-sa-token\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847909 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-srv-cert\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847917 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-certificates\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847933 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f3bfee-47a2-4347-872f-b4da6aeb68e5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847965 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-client\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847997 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848022 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75810b59-18d3-400b-ab38-25d3dcf7cea5-webhook-cert\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848051 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848080 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxqj8\" (UniqueName: \"kubernetes.io/projected/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-kube-api-access-xxqj8\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848105 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848127 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-policies\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848150 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef1e487f-08d8-4ba7-805a-68cf93ce434e-trusted-ca\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848174 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4d2516c-521d-4e48-a547-81c06a8cda3e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848204 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-config\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848222 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-console-config\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848242 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848269 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7dc7b0ca-d149-48fa-acac-a7835087634b-cert\") pod \"ingress-canary-bjtq8\" (UID: \"7dc7b0ca-d149-48fa-acac-a7835087634b\") " pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848296 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f0e84263-7736-4b18-a955-7fadfb307294-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848320 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/436a9ea3-c8a5-4445-89bb-21133fe76d23-serving-cert\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848507 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.848574 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: E0216 13:05:42.848746 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.348730248 +0000 UTC m=+142.675444086 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.849387 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef1e487f-08d8-4ba7-805a-68cf93ce434e-trusted-ca\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.849491 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-config\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.850258 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.850967 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-encryption-config\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.851199 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-trusted-ca\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.851272 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/39dc10dd-2280-470a-b50e-272b7d1b705f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.851295 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-serving-cert\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.851579 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
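The E0216 nestedpendingoperations.go:348 entry above fails because the kubevirt.io.hostpath-provisioner driver has not yet registered with this kubelet, so the MountDevice call cannot obtain a CSI client and the operation is requeued (durationBeforeRetry 500ms). One way to see which CSI drivers a node has actually registered is the node's CSINode object, which the kubelet keeps in step with its plugin registry. Below is a minimal client-go sketch, not part of this log or of any OpenShift tooling; the node name "crc" is taken from the journald prefix of this capture, and the kubeconfig location is an assumption.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: running outside the cluster with a kubeconfig at the
	// default location (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// A driver absent from spec.drivers is what produces "driver name ...
	// not found in the list of registered CSI drivers" in the log above.
	// Node name "crc" is an assumption taken from this capture's prefix.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Println("registered CSI driver:", d.Name)
	}
}

Until the driver's registration socket shows up in the kubelet plugin registry, every mount or unmount of this volume keeps failing with the same message and is retried with increasing backoff, which is exactly what the subsequent UnmountVolume.TearDown error below shows.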
\"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.847325 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.851616 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-serving-cert\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.852259 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-policies\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.852526 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c171de2-9344-4919-986f-e6544cb7cf0a-config\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.852747 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-serving-cert\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.852763 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-image-import-ca\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.852807 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-dir\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.852816 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a12efaa8-e8bf-47eb-b019-d5e1fe136221-audit-dir\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.852961 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.853152 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-audit-policies\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.853140 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.853692 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/28235c00-42f1-4935-9b42-c055518c28d3-images\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.854682 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.854864 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a12efaa8-e8bf-47eb-b019-d5e1fe136221-etcd-client\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.855241 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ada6fd48-493b-41ec-aee5-dd526d65dcdb-metrics-tls\") pod \"dns-operator-744455d44c-l9v8l\" (UID: \"ada6fd48-493b-41ec-aee5-dd526d65dcdb\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.855344 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.855789 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-oauth-config\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:42 crc 
kubenswrapper[4816]: I0216 13:05:42.856338 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a12efaa8-e8bf-47eb-b019-d5e1fe136221-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.859077 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.904837 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5fss\" (UniqueName: \"kubernetes.io/projected/1cae9b2f-0c46-4058-8b87-8d8cf933246c-kube-api-access-k5fss\") pod \"route-controller-manager-6576b87f9c-9pl8s\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.919044 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv6s7\" (UniqueName: \"kubernetes.io/projected/e4461b7c-5e7f-4ac9-bf37-2510584b4eb9-kube-api-access-fv6s7\") pod \"cluster-samples-operator-665b6dd947-t9xz7\" (UID: \"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.945907 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jjcp\" (UniqueName: \"kubernetes.io/projected/0196136d-4ea7-4015-97d2-c885db51c66f-kube-api-access-6jjcp\") pod \"downloads-7954f5f757-c6kz6\" (UID: \"0196136d-4ea7-4015-97d2-c885db51c66f\") " pod="openshift-console/downloads-7954f5f757-c6kz6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.948779 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:42 crc kubenswrapper[4816]: E0216 13:05:42.948915 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.448900592 +0000 UTC m=+142.775614320 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.948947 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f0edb6a-4994-4385-b66a-7bf8fad44d60-srv-cert\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.948965 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4073598c-2443-4558-8eb3-d7a5b6a15c75-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.948981 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0d109fdb-3df5-4760-88d6-7e63ceb417e6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rtjrp\" (UID: \"0d109fdb-3df5-4760-88d6-7e63ceb417e6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949002 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44e4d3ad-4a18-4b32-a575-79829e3cd784-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949024 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f0edb6a-4994-4385-b66a-7bf8fad44d60-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949051 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-node-bootstrap-token\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949073 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44e4d3ad-4a18-4b32-a575-79829e3cd784-trusted-ca\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949112 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-metrics-certs\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949135 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppffv\" (UniqueName: \"kubernetes.io/projected/0d109fdb-3df5-4760-88d6-7e63ceb417e6-kube-api-access-ppffv\") pod \"multus-admission-controller-857f4d67dd-rtjrp\" (UID: \"0d109fdb-3df5-4760-88d6-7e63ceb417e6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949158 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75810b59-18d3-400b-ab38-25d3dcf7cea5-apiservice-cert\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949188 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7ae3017b-8a59-4109-83c2-191b8f54831c-signing-cabundle\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.949218 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8vmp\" (UniqueName: \"kubernetes.io/projected/7ae3017b-8a59-4109-83c2-191b8f54831c-kube-api-access-m8vmp\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.950419 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/44e4d3ad-4a18-4b32-a575-79829e3cd784-trusted-ca\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.950852 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/22166fdb-00e3-43e2-9a1d-d75446964277-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-86gqx\" (UID: \"22166fdb-00e3-43e2-9a1d-d75446964277\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.950887 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4073598c-2443-4558-8eb3-d7a5b6a15c75-config\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.950909 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-images\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: 
\"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.950940 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k42s\" (UniqueName: \"kubernetes.io/projected/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-kube-api-access-6k42s\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.950967 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lrtd\" (UniqueName: \"kubernetes.io/projected/9f11459f-3567-4ed0-8bf3-d55ca3507378-kube-api-access-8lrtd\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.950993 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkg5b\" (UniqueName: \"kubernetes.io/projected/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-kube-api-access-pkg5b\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951015 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-service-ca\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951039 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bct74\" (UniqueName: \"kubernetes.io/projected/b2badeb2-df5f-473e-b74a-5948ce933a4b-kube-api-access-bct74\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951072 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2badeb2-df5f-473e-b74a-5948ce933a4b-serving-cert\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951095 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e27acd41-ceb3-439f-b3f7-6ad547609158-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951117 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96c042d0-6736-4ff8-ae21-d07442236cec-config-volume\") pod \"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951144 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-ca\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951166 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4d2516c-521d-4e48-a547-81c06a8cda3e-config\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951188 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436a9ea3-c8a5-4445-89bb-21133fe76d23-config\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951219 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z69sc\" (UniqueName: \"kubernetes.io/projected/75810b59-18d3-400b-ab38-25d3dcf7cea5-kube-api-access-z69sc\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951243 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsqrt\" (UniqueName: \"kubernetes.io/projected/e34f8aa9-54c5-4964-a481-ff6745ec54d8-kube-api-access-gsqrt\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951269 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44e4d3ad-4a18-4b32-a575-79829e3cd784-metrics-tls\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951306 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-certs\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951330 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gbwq\" (UniqueName: \"kubernetes.io/projected/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-kube-api-access-7gbwq\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951352 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9djzp\" (UniqueName: \"kubernetes.io/projected/96c042d0-6736-4ff8-ae21-d07442236cec-kube-api-access-9djzp\") pod 
\"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951374 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7ae3017b-8a59-4109-83c2-191b8f54831c-signing-key\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951395 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4d2516c-521d-4e48-a547-81c06a8cda3e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951429 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-stats-auth\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951454 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f0edb6a-4994-4385-b66a-7bf8fad44d60-srv-cert\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951461 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wvbp\" (UniqueName: \"kubernetes.io/projected/7be610ca-65bb-4abc-809f-37a005b6f491-kube-api-access-9wvbp\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951515 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-profile-collector-cert\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951485 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4073598c-2443-4558-8eb3-d7a5b6a15c75-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951539 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951570 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mbkkn\" (UniqueName: \"kubernetes.io/projected/436a9ea3-c8a5-4445-89bb-21133fe76d23-kube-api-access-mbkkn\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951595 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwb2v\" (UniqueName: \"kubernetes.io/projected/22166fdb-00e3-43e2-9a1d-d75446964277-kube-api-access-gwb2v\") pod \"package-server-manager-789f6589d5-86gqx\" (UID: \"22166fdb-00e3-43e2-9a1d-d75446964277\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951620 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr8s8\" (UniqueName: \"kubernetes.io/projected/7dc7b0ca-d149-48fa-acac-a7835087634b-kube-api-access-hr8s8\") pod \"ingress-canary-bjtq8\" (UID: \"7dc7b0ca-d149-48fa-acac-a7835087634b\") " pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951645 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-mountpoint-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951685 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96c042d0-6736-4ff8-ae21-d07442236cec-metrics-tls\") pod \"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951711 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951733 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-plugins-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951762 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-config\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951783 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e27acd41-ceb3-439f-b3f7-6ad547609158-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951805 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfrtt\" (UniqueName: \"kubernetes.io/projected/963d9667-805f-49b3-a315-e61e0f6718bf-kube-api-access-wfrtt\") pod \"migrator-59844c95c7-vgdfk\" (UID: \"963d9667-805f-49b3-a315-e61e0f6718bf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951830 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e34f8aa9-54c5-4964-a481-ff6745ec54d8-config-volume\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951853 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951875 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-proxy-tls\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951895 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrtrc\" (UniqueName: \"kubernetes.io/projected/3612ec61-6d09-4cf6-abc7-aa0258e232ea-kube-api-access-mrtrc\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951928 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crtjd\" (UniqueName: \"kubernetes.io/projected/4f0edb6a-4994-4385-b66a-7bf8fad44d60-kube-api-access-crtjd\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951973 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5qqt\" (UniqueName: \"kubernetes.io/projected/44e4d3ad-4a18-4b32-a575-79829e3cd784-kube-api-access-v5qqt\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.951999 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/13129e04-c3d9-4387-bd36-b673b082d90e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bcsnm\" (UID: \"13129e04-c3d9-4387-bd36-b673b082d90e\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952029 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4073598c-2443-4558-8eb3-d7a5b6a15c75-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952051 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-registration-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952082 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-csi-data-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952107 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-socket-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952145 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tjhx\" (UniqueName: \"kubernetes.io/projected/e27acd41-ceb3-439f-b3f7-6ad547609158-kube-api-access-6tjhx\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952169 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f0e84263-7736-4b18-a955-7fadfb307294-proxy-tls\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952213 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4frp\" (UniqueName: \"kubernetes.io/projected/13129e04-c3d9-4387-bd36-b673b082d90e-kube-api-access-x4frp\") pod \"control-plane-machine-set-operator-78cbb6b69f-bcsnm\" (UID: \"13129e04-c3d9-4387-bd36-b673b082d90e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952239 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952268 
4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbt9m\" (UniqueName: \"kubernetes.io/projected/f0e84263-7736-4b18-a955-7fadfb307294-kube-api-access-pbt9m\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952292 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-client\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952325 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-srv-cert\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952348 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f3bfee-47a2-4347-872f-b4da6aeb68e5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952377 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952402 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75810b59-18d3-400b-ab38-25d3dcf7cea5-webhook-cert\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952417 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-ca\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952439 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4d2516c-521d-4e48-a547-81c06a8cda3e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952486 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/7dc7b0ca-d149-48fa-acac-a7835087634b-cert\") pod \"ingress-canary-bjtq8\" (UID: \"7dc7b0ca-d149-48fa-acac-a7835087634b\") " pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952513 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f0e84263-7736-4b18-a955-7fadfb307294-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952536 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/436a9ea3-c8a5-4445-89bb-21133fe76d23-serving-cert\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952561 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e34f8aa9-54c5-4964-a481-ff6745ec54d8-secret-volume\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952585 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/75810b59-18d3-400b-ab38-25d3dcf7cea5-tmpfs\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952608 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3612ec61-6d09-4cf6-abc7-aa0258e232ea-service-ca-bundle\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952631 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f3bfee-47a2-4347-872f-b4da6aeb68e5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952673 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e27acd41-ceb3-439f-b3f7-6ad547609158-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952699 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-966kl\" (UID: 
\"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952721 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-default-certificate\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952752 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znvv5\" (UniqueName: \"kubernetes.io/projected/49f3bfee-47a2-4347-872f-b4da6aeb68e5-kube-api-access-znvv5\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.952775 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.953118 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4d2516c-521d-4e48-a547-81c06a8cda3e-config\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.953402 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.954405 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-metrics-certs\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.954791 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436a9ea3-c8a5-4445-89bb-21133fe76d23-config\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.955152 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/75810b59-18d3-400b-ab38-25d3dcf7cea5-tmpfs\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.955187 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49f3bfee-47a2-4347-872f-b4da6aeb68e5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.955968 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3612ec61-6d09-4cf6-abc7-aa0258e232ea-service-ca-bundle\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.955992 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f0e84263-7736-4b18-a955-7fadfb307294-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.956101 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75810b59-18d3-400b-ab38-25d3dcf7cea5-apiservice-cert\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.956689 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0d109fdb-3df5-4760-88d6-7e63ceb417e6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rtjrp\" (UID: \"0d109fdb-3df5-4760-88d6-7e63ceb417e6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.957253 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-images\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.957253 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7ae3017b-8a59-4109-83c2-191b8f54831c-signing-cabundle\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.957721 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-certs\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.957840 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4073598c-2443-4558-8eb3-d7a5b6a15c75-config\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: 
\"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.957961 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-registration-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958116 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7dc7b0ca-d149-48fa-acac-a7835087634b-cert\") pod \"ingress-canary-bjtq8\" (UID: \"7dc7b0ca-d149-48fa-acac-a7835087634b\") " pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958165 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2badeb2-df5f-473e-b74a-5948ce933a4b-serving-cert\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: E0216 13:05:42.958308 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.458296223 +0000 UTC m=+142.785009951 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958406 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-service-ca\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958443 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-srv-cert\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958515 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-csi-data-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958565 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-socket-dir\") pod \"csi-hostpathplugin-p66fg\" 
(UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958797 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e34f8aa9-54c5-4964-a481-ff6745ec54d8-config-volume\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.958901 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/436a9ea3-c8a5-4445-89bb-21133fe76d23-serving-cert\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.959723 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f3bfee-47a2-4347-872f-b4da6aeb68e5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.960333 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-node-bootstrap-token\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.960601 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7ae3017b-8a59-4109-83c2-191b8f54831c-signing-key\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.961172 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.961436 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f0e84263-7736-4b18-a955-7fadfb307294-proxy-tls\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.961810 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e34f8aa9-54c5-4964-a481-ff6745ec54d8-secret-volume\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.962105 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/22166fdb-00e3-43e2-9a1d-d75446964277-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-86gqx\" (UID: \"22166fdb-00e3-43e2-9a1d-d75446964277\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.962295 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e27acd41-ceb3-439f-b3f7-6ad547609158-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.962580 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.963115 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/13129e04-c3d9-4387-bd36-b673b082d90e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bcsnm\" (UID: \"13129e04-c3d9-4387-bd36-b673b082d90e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.963683 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.963907 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4d2516c-521d-4e48-a547-81c06a8cda3e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.963965 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-plugins-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.963965 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7be610ca-65bb-4abc-809f-37a005b6f491-mountpoint-dir\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.964349 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b2badeb2-df5f-473e-b74a-5948ce933a4b-config\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.965357 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75810b59-18d3-400b-ab38-25d3dcf7cea5-webhook-cert\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.965370 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-profile-collector-cert\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.965476 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/44e4d3ad-4a18-4b32-a575-79829e3cd784-metrics-tls\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.965708 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.965356 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e27acd41-ceb3-439f-b3f7-6ad547609158-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.965881 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b2badeb2-df5f-473e-b74a-5948ce933a4b-etcd-client\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.966025 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd522\" (UniqueName: \"kubernetes.io/projected/28235c00-42f1-4935-9b42-c055518c28d3-kube-api-access-jd522\") pod \"machine-api-operator-5694c8668f-9mmht\" (UID: \"28235c00-42f1-4935-9b42-c055518c28d3\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.966221 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-stats-auth\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: 
I0216 13:05:42.966332 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96c042d0-6736-4ff8-ae21-d07442236cec-config-volume\") pod \"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.967248 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-proxy-tls\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.967268 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96c042d0-6736-4ff8-ae21-d07442236cec-metrics-tls\") pod \"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.967252 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f0edb6a-4994-4385-b66a-7bf8fad44d60-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.967466 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3612ec61-6d09-4cf6-abc7-aa0258e232ea-default-certificate\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.976974 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2tll\" (UniqueName: \"kubernetes.io/projected/4034cf38-9c2d-43ef-89fb-f4898f7ad8fb-kube-api-access-d2tll\") pod \"apiserver-7bbb656c7d-vcm42\" (UID: \"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.984812 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:42 crc kubenswrapper[4816]: I0216 13:05:42.996956 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbrdg\" (UniqueName: \"kubernetes.io/projected/0c171de2-9344-4919-986f-e6544cb7cf0a-kube-api-access-tbrdg\") pod \"machine-approver-56656f9798-x8vcr\" (UID: \"0c171de2-9344-4919-986f-e6544cb7cf0a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.001829 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.019477 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxvr5\" (UniqueName: \"kubernetes.io/projected/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-kube-api-access-qxvr5\") pod \"controller-manager-879f6c89f-jjn42\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.046965 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.047307 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvdz6\" (UniqueName: \"kubernetes.io/projected/ef1e487f-08d8-4ba7-805a-68cf93ce434e-kube-api-access-bvdz6\") pod \"console-operator-58897d9998-vrcl2\" (UID: \"ef1e487f-08d8-4ba7-805a-68cf93ce434e\") " pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.053799 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.054091 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.554068095 +0000 UTC m=+142.880781823 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.054380 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.055865 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.555841774 +0000 UTC m=+142.882555542 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.056419 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.060151 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n529c\" (UniqueName: \"kubernetes.io/projected/6265fff8-9c3e-4f9c-84d6-4b304047d4b0-kube-api-access-n529c\") pod \"openshift-apiserver-operator-796bbdcf4f-ff4c9\" (UID: \"6265fff8-9c3e-4f9c-84d6-4b304047d4b0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.065975 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.079524 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-c6kz6" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.082047 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d95b2\" (UniqueName: \"kubernetes.io/projected/d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb-kube-api-access-d95b2\") pod \"openshift-controller-manager-operator-756b6f6bc6-f58zp\" (UID: \"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.092974 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.102200 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr4hx\" (UniqueName: \"kubernetes.io/projected/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-kube-api-access-pr4hx\") pod \"oauth-openshift-558db77b4-brmt2\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.113340 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.120090 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwtnj\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-kube-api-access-kwtnj\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.138874 4816 util.go:30] "No sandbox for pod can be found. 
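The paired entries above show the retry choreography: reconciler_common.go starts an operation, nestedpendingoperations.go fails it and parks the operation key until a deadline ("No retries permitted until ... durationBeforeRetry 500ms", where the m=+142.88... suffix is the kubelet process's monotonic clock), and the next reconciler pass tries again. The kubelet grows this delay exponentially for an operation that keeps failing, although every attempt in this log still shows the initial 500ms. A minimal sketch of that backoff policy, assuming the upstream defaults of a 500ms initial delay, doubling per failure, and a cap of 2m2s; the function name is illustrative, not kubelet's actual symbol:

package main

import (
	"fmt"
	"time"
)

// nextBackoff mimics the exponential backoff the kubelet's volume manager
// applies between attempts of the same volume operation. Assumed defaults:
// 500ms initial delay, factor 2, capped at 2m2s.
func nextBackoff(prev time.Duration) time.Duration {
	const (
		initial  = 500 * time.Millisecond
		maxDelay = 2*time.Minute + 2*time.Second
	)
	if prev <= 0 {
		return initial
	}
	next := prev * 2
	if next > maxDelay {
		next = maxDelay
	}
	return next
}

func main() {
	d := time.Duration(0)
	now := time.Now()
	for i := 1; i <= 6; i++ {
		d = nextBackoff(d)
		// Matches the shape of the log line:
		// "No retries permitted until <deadline> (durationBeforeRetry <d>)"
		fmt.Printf("attempt %d failed: no retries permitted until %s (durationBeforeRetry %v)\n",
			i, now.Add(d).Format(time.RFC3339), d)
	}
}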
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.154364 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spkpm\" (UniqueName: \"kubernetes.io/projected/32f87458-d8fa-446e-ab8e-2b349e1152ad-kube-api-access-spkpm\") pod \"openshift-config-operator-7777fb866f-2wpnx\" (UID: \"32f87458-d8fa-446e-ab8e-2b349e1152ad\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.155265 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.157610 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.657585702 +0000 UTC m=+142.984299430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.158104 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.158559 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.658548298 +0000 UTC m=+142.985262026 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.162174 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxqj8\" (UniqueName: \"kubernetes.io/projected/4f6a7c47-55df-4e3a-b2dc-34eceff64b90-kube-api-access-xxqj8\") pod \"authentication-operator-69f744f599-h72gw\" (UID: \"4f6a7c47-55df-4e3a-b2dc-34eceff64b90\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.188211 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2ddt\" (UniqueName: \"kubernetes.io/projected/89f428fd-8717-4819-81d8-ee04443b38a5-kube-api-access-s2ddt\") pod \"console-f9d7485db-f2dr7\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") " pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.200598 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.204430 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-bound-sa-token\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.224590 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8dst\" (UniqueName: \"kubernetes.io/projected/ada6fd48-493b-41ec-aee5-dd526d65dcdb-kube-api-access-n8dst\") pod \"dns-operator-744455d44c-l9v8l\" (UID: \"ada6fd48-493b-41ec-aee5-dd526d65dcdb\") " pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:43 crc kubenswrapper[4816]: W0216 13:05:43.236397 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cae9b2f_0c46_4058_8b87_8d8cf933246c.slice/crio-a94163f3400e4635beef5be0a92f0ef34b6f1f7ecf9f4e39da312c2f9c462de5 WatchSource:0}: Error finding container a94163f3400e4635beef5be0a92f0ef34b6f1f7ecf9f4e39da312c2f9c462de5: Status 404 returned error can't find the container with id a94163f3400e4635beef5be0a92f0ef34b6f1f7ecf9f4e39da312c2f9c462de5 Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.250776 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.250995 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22jc8\" (UniqueName: \"kubernetes.io/projected/a12efaa8-e8bf-47eb-b019-d5e1fe136221-kube-api-access-22jc8\") pod \"apiserver-76f77b778f-l7q8r\" (UID: \"a12efaa8-e8bf-47eb-b019-d5e1fe136221\") " pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.259752 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.260418 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.76039946 +0000 UTC m=+143.087113198 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.282211 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppffv\" (UniqueName: \"kubernetes.io/projected/0d109fdb-3df5-4760-88d6-7e63ceb417e6-kube-api-access-ppffv\") pod \"multus-admission-controller-857f4d67dd-rtjrp\" (UID: \"0d109fdb-3df5-4760-88d6-7e63ceb417e6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.293722 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.325429 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gbwq\" (UniqueName: \"kubernetes.io/projected/62b5f1bb-5470-48b7-8816-0fa127e5a4b8-kube-api-access-7gbwq\") pod \"machine-config-operator-74547568cd-bksbg\" (UID: \"62b5f1bb-5470-48b7-8816-0fa127e5a4b8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.338169 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9djzp\" (UniqueName: \"kubernetes.io/projected/96c042d0-6736-4ff8-ae21-d07442236cec-kube-api-access-9djzp\") pod \"dns-default-p9dwz\" (UID: \"96c042d0-6736-4ff8-ae21-d07442236cec\") " pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.338930 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k42s\" (UniqueName: \"kubernetes.io/projected/ac5acee1-9801-43d5-ab3e-16a00d6f98a2-kube-api-access-6k42s\") pod \"machine-config-server-ltptv\" (UID: \"ac5acee1-9801-43d5-ab3e-16a00d6f98a2\") " pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.359305 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jjn42"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.361796 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.362129 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.862109016 +0000 UTC m=+143.188822815 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.362586 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lrtd\" (UniqueName: \"kubernetes.io/projected/9f11459f-3567-4ed0-8bf3-d55ca3507378-kube-api-access-8lrtd\") pod \"marketplace-operator-79b997595-966kl\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.365883 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.371882 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.385761 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z69sc\" (UniqueName: \"kubernetes.io/projected/75810b59-18d3-400b-ab38-25d3dcf7cea5-kube-api-access-z69sc\") pod \"packageserver-d55dfcdfc-hpzh6\" (UID: \"75810b59-18d3-400b-ab38-25d3dcf7cea5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.387109 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" Feb 16 13:05:43 crc kubenswrapper[4816]: W0216 13:05:43.398484 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0153ac39_6d42_4f8d_9279_1c38e9f8fc6d.slice/crio-d8873de42a5f6fc0a08613aba38c6c1b2e17188fff43ebcb318747a534166fc6 WatchSource:0}: Error finding container d8873de42a5f6fc0a08613aba38c6c1b2e17188fff43ebcb318747a534166fc6: Status 404 returned error can't find the container with id d8873de42a5f6fc0a08613aba38c6c1b2e17188fff43ebcb318747a534166fc6 Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.401673 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.405517 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsqrt\" (UniqueName: \"kubernetes.io/projected/e34f8aa9-54c5-4964-a481-ff6745ec54d8-kube-api-access-gsqrt\") pod \"collect-profiles-29520780-nkg88\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.423465 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8vmp\" (UniqueName: \"kubernetes.io/projected/7ae3017b-8a59-4109-83c2-191b8f54831c-kube-api-access-m8vmp\") pod \"service-ca-9c57cc56f-8gs8z\" (UID: \"7ae3017b-8a59-4109-83c2-191b8f54831c\") " pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.423769 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.447765 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfrtt\" (UniqueName: \"kubernetes.io/projected/963d9667-805f-49b3-a315-e61e0f6718bf-kube-api-access-wfrtt\") pod \"migrator-59844c95c7-vgdfk\" (UID: \"963d9667-805f-49b3-a315-e61e0f6718bf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.462356 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.463240 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:43.963218547 +0000 UTC m=+143.289932275 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.476789 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.506312 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.520491 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.523313 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkg5b\" (UniqueName: \"kubernetes.io/projected/003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb-kube-api-access-pkg5b\") pod \"catalog-operator-68c6474976-rn5jb\" (UID: \"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.532737 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crtjd\" (UniqueName: \"kubernetes.io/projected/4f0edb6a-4994-4385-b66a-7bf8fad44d60-kube-api-access-crtjd\") pod \"olm-operator-6b444d44fb-4qmwv\" (UID: \"4f0edb6a-4994-4385-b66a-7bf8fad44d60\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.532988 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.535011 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9mmht"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.535916 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4d2516c-521d-4e48-a547-81c06a8cda3e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8g477\" (UID: \"c4d2516c-521d-4e48-a547-81c06a8cda3e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.536381 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-c6kz6"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.538924 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.548805 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tjhx\" (UniqueName: \"kubernetes.io/projected/e27acd41-ceb3-439f-b3f7-6ad547609158-kube-api-access-6tjhx\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.549089 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.560252 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bct74\" (UniqueName: \"kubernetes.io/projected/b2badeb2-df5f-473e-b74a-5948ce933a4b-kube-api-access-bct74\") pod \"etcd-operator-b45778765-jhhtj\" (UID: \"b2badeb2-df5f-473e-b74a-5948ce933a4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.560375 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.564374 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.564857 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.564956 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.064937205 +0000 UTC m=+143.391650933 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.573881 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.586291 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4073598c-2443-4558-8eb3-d7a5b6a15c75-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xj8mn\" (UID: \"4073598c-2443-4558-8eb3-d7a5b6a15c75\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.587139 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.590045 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5qqt\" (UniqueName: \"kubernetes.io/projected/44e4d3ad-4a18-4b32-a575-79829e3cd784-kube-api-access-v5qqt\") pod \"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.594547 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vrcl2"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.599433 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e27acd41-ceb3-439f-b3f7-6ad547609158-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pstjh\" (UID: \"e27acd41-ceb3-439f-b3f7-6ad547609158\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.618697 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.619104 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrtrc\" (UniqueName: \"kubernetes.io/projected/3612ec61-6d09-4cf6-abc7-aa0258e232ea-kube-api-access-mrtrc\") pod \"router-default-5444994796-lwmcd\" (UID: \"3612ec61-6d09-4cf6-abc7-aa0258e232ea\") " pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.624179 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ltptv" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.645375 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4frp\" (UniqueName: \"kubernetes.io/projected/13129e04-c3d9-4387-bd36-b673b082d90e-kube-api-access-x4frp\") pod \"control-plane-machine-set-operator-78cbb6b69f-bcsnm\" (UID: \"13129e04-c3d9-4387-bd36-b673b082d90e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:43 crc kubenswrapper[4816]: W0216 13:05:43.646247 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd06da9e4_b2e2_42f5_ae9f_67be3df9e0fb.slice/crio-25717befb4802b9d2a4332729f76b682322044f9ef753039a40849f0546cad01 WatchSource:0}: Error finding container 25717befb4802b9d2a4332729f76b682322044f9ef753039a40849f0546cad01: Status 404 returned error can't find the container with id 25717befb4802b9d2a4332729f76b682322044f9ef753039a40849f0546cad01 Feb 16 13:05:43 crc kubenswrapper[4816]: W0216 13:05:43.654099 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0196136d_4ea7_4015_97d2_c885db51c66f.slice/crio-1741e8515f3d612a653110809eebff8592857354b4448eee2477ef49498fe278 WatchSource:0}: Error finding container 1741e8515f3d612a653110809eebff8592857354b4448eee2477ef49498fe278: Status 404 returned error can't find the container with id 1741e8515f3d612a653110809eebff8592857354b4448eee2477ef49498fe278 Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.659042 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbt9m\" (UniqueName: \"kubernetes.io/projected/f0e84263-7736-4b18-a955-7fadfb307294-kube-api-access-pbt9m\") pod \"machine-config-controller-84d6567774-5ljlg\" (UID: \"f0e84263-7736-4b18-a955-7fadfb307294\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:43 crc kubenswrapper[4816]: W0216 13:05:43.665847 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef1e487f_08d8_4ba7_805a_68cf93ce434e.slice/crio-dc70a8499dab3ad639db44159fdee9deac5c9c86fdbf9e39604e2ed176dd88bb WatchSource:0}: Error finding container dc70a8499dab3ad639db44159fdee9deac5c9c86fdbf9e39604e2ed176dd88bb: Status 404 returned error can't find the container with id dc70a8499dab3ad639db44159fdee9deac5c9c86fdbf9e39604e2ed176dd88bb Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.666405 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.666497 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.166483297 +0000 UTC m=+143.493197025 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.666742 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.667001 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.166991531 +0000 UTC m=+143.493705259 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.679062 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znvv5\" (UniqueName: \"kubernetes.io/projected/49f3bfee-47a2-4347-872f-b4da6aeb68e5-kube-api-access-znvv5\") pod \"kube-storage-version-migrator-operator-b67b599dd-kmpxb\" (UID: \"49f3bfee-47a2-4347-872f-b4da6aeb68e5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.699787 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7"] Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.705136 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwb2v\" (UniqueName: \"kubernetes.io/projected/22166fdb-00e3-43e2-9a1d-d75446964277-kube-api-access-gwb2v\") pod \"package-server-manager-789f6589d5-86gqx\" (UID: \"22166fdb-00e3-43e2-9a1d-d75446964277\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.721038 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbkkn\" (UniqueName: \"kubernetes.io/projected/436a9ea3-c8a5-4445-89bb-21133fe76d23-kube-api-access-mbkkn\") pod \"service-ca-operator-777779d784-mcstk\" (UID: \"436a9ea3-c8a5-4445-89bb-21133fe76d23\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.740625 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44e4d3ad-4a18-4b32-a575-79829e3cd784-bound-sa-token\") pod 
\"ingress-operator-5b745b69d9-tpb4x\" (UID: \"44e4d3ad-4a18-4b32-a575-79829e3cd784\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.747903 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.751899 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.757674 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.759370 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-844md\" (UID: \"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.763807 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.773780 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.774227 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.27420447 +0000 UTC m=+143.600918198 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.774886 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.785325 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr8s8\" (UniqueName: \"kubernetes.io/projected/7dc7b0ca-d149-48fa-acac-a7835087634b-kube-api-access-hr8s8\") pod \"ingress-canary-bjtq8\" (UID: \"7dc7b0ca-d149-48fa-acac-a7835087634b\") " pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.786498 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.792310 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.798999 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.813055 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.815870 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wvbp\" (UniqueName: \"kubernetes.io/projected/7be610ca-65bb-4abc-809f-37a005b6f491-kube-api-access-9wvbp\") pod \"csi-hostpathplugin-p66fg\" (UID: \"7be610ca-65bb-4abc-809f-37a005b6f491\") " pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:43 crc kubenswrapper[4816]: W0216 13:05:43.821918 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac5acee1_9801_43d5_ab3e_16a00d6f98a2.slice/crio-2e4b70f8bd1b115f1c450482d50e3b47d6e74e262d087812cdbdf8b5b9509c6d WatchSource:0}: Error finding container 2e4b70f8bd1b115f1c450482d50e3b47d6e74e262d087812cdbdf8b5b9509c6d: Status 404 returned error can't find the container with id 2e4b70f8bd1b115f1c450482d50e3b47d6e74e262d087812cdbdf8b5b9509c6d Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.827917 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.835880 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.866779 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.875238 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.875570 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.375557217 +0000 UTC m=+143.702270935 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.888562 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.895738 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bjtq8" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.914064 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.976630 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.976925 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.476886104 +0000 UTC m=+143.803599832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:43 crc kubenswrapper[4816]: I0216 13:05:43.977123 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:43 crc kubenswrapper[4816]: E0216 13:05:43.977419 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.477405968 +0000 UTC m=+143.804119696 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.077676 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.077868 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.57784053 +0000 UTC m=+143.904554258 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.078366 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.078683 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.578669563 +0000 UTC m=+143.905383291 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.082863 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-c6kz6" event={"ID":"0196136d-4ea7-4015-97d2-c885db51c66f","Type":"ContainerStarted","Data":"1741e8515f3d612a653110809eebff8592857354b4448eee2477ef49498fe278"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.089720 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" event={"ID":"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d","Type":"ContainerStarted","Data":"763560e421874c542fa45a6d4288f1a203475533514a7016238a720216705c9f"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.089763 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" event={"ID":"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d","Type":"ContainerStarted","Data":"d8873de42a5f6fc0a08613aba38c6c1b2e17188fff43ebcb318747a534166fc6"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.089944 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.103197 4816 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-jjn42 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.103244 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.103611 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" event={"ID":"28235c00-42f1-4935-9b42-c055518c28d3","Type":"ContainerStarted","Data":"d8c0c8380d474916a74aa2390cd0497c7a461e9bf84b995a36442be767e71402"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.111264 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.113442 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" event={"ID":"0c171de2-9344-4919-986f-e6544cb7cf0a","Type":"ContainerStarted","Data":"f2fddc4a0bc4855eae8b9fa9f44e85afead0188e64f09c7b0ef84e19225ab45e"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.113481 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" 
event={"ID":"0c171de2-9344-4919-986f-e6544cb7cf0a","Type":"ContainerStarted","Data":"336596a421b33e97761dc265401288b8d2461ae4e885be5b3584ee9bc8f100f7"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.120885 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ltptv" event={"ID":"ac5acee1-9801-43d5-ab3e-16a00d6f98a2","Type":"ContainerStarted","Data":"2e4b70f8bd1b115f1c450482d50e3b47d6e74e262d087812cdbdf8b5b9509c6d"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.124554 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" event={"ID":"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb","Type":"ContainerStarted","Data":"25717befb4802b9d2a4332729f76b682322044f9ef753039a40849f0546cad01"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.125878 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" event={"ID":"1cae9b2f-0c46-4058-8b87-8d8cf933246c","Type":"ContainerStarted","Data":"adf048060150afd0625ec77a6f055cb29f89fe8b27b89181b427346b8a9ae9ae"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.125900 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" event={"ID":"1cae9b2f-0c46-4058-8b87-8d8cf933246c","Type":"ContainerStarted","Data":"a94163f3400e4635beef5be0a92f0ef34b6f1f7ecf9f4e39da312c2f9c462de5"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.126175 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.127779 4816 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9pl8s container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.127822 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.129139 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" event={"ID":"ef1e487f-08d8-4ba7-805a-68cf93ce434e","Type":"ContainerStarted","Data":"dc70a8499dab3ad639db44159fdee9deac5c9c86fdbf9e39604e2ed176dd88bb"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.130454 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" event={"ID":"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb","Type":"ContainerStarted","Data":"3f136b0ef123d3face05b16ff55721f4531112235da19eed1aee93c4da87caf1"} Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.179055 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.179306 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.679270659 +0000 UTC m=+144.005984397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.179381 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.179732 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.679714881 +0000 UTC m=+144.006428699 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.280745 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.280865 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.780847572 +0000 UTC m=+144.107561300 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.281116 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.281431 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.781420308 +0000 UTC m=+144.108134036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.300662 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.307257 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-f2dr7"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.320624 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-brmt2"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.320688 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l9v8l"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.420213 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.420496 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:44.920481423 +0000 UTC m=+144.247195151 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: W0216 13:05:44.457894 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podada6fd48_493b_41ec_aee5_dd526d65dcdb.slice/crio-7933f7b7922a8ed9bf6fa8ba2f13ad3d2aa0b6d60683cc0f9f81c57dc9c99fbd WatchSource:0}: Error finding container 7933f7b7922a8ed9bf6fa8ba2f13ad3d2aa0b6d60683cc0f9f81c57dc9c99fbd: Status 404 returned error can't find the container with id 7933f7b7922a8ed9bf6fa8ba2f13ad3d2aa0b6d60683cc0f9f81c57dc9c99fbd Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.468261 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h72gw"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.521090 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.521428 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.021417478 +0000 UTC m=+144.348131206 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.616764 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" podStartSLOduration=118.61674815800001 podStartE2EDuration="1m58.616748158s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:44.616037999 +0000 UTC m=+143.942751727" watchObservedRunningTime="2026-02-16 13:05:44.616748158 +0000 UTC m=+143.943461886" Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.623638 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.623974 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.123959979 +0000 UTC m=+144.450673707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.725376 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.725615 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.225604334 +0000 UTC m=+144.552318062 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.827460 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.828236 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.328219965 +0000 UTC m=+144.654933693 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.857090 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-966kl"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.857977 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" podStartSLOduration=119.857964542 podStartE2EDuration="1m59.857964542s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:44.853768066 +0000 UTC m=+144.180481824" watchObservedRunningTime="2026-02-16 13:05:44.857964542 +0000 UTC m=+144.184678270" Feb 16 13:05:44 crc kubenswrapper[4816]: W0216 13:05:44.866512 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f11459f_3567_4ed0_8bf3_d55ca3507378.slice/crio-3d5e424ba7a68d97a32fb49c7f526601376795f9f808f6e0ead729b996475a89 WatchSource:0}: Error finding container 3d5e424ba7a68d97a32fb49c7f526601376795f9f808f6e0ead729b996475a89: Status 404 returned error can't find the container with id 3d5e424ba7a68d97a32fb49c7f526601376795f9f808f6e0ead729b996475a89 Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.929496 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:44 crc kubenswrapper[4816]: E0216 13:05:44.932003 4816 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.43198088 +0000 UTC m=+144.758694658 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.987558 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l7q8r"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.989530 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg"] Feb 16 13:05:44 crc kubenswrapper[4816]: I0216 13:05:44.994097 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rtjrp"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.010198 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.031294 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.031673 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.53164407 +0000 UTC m=+144.858357798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: W0216 13:05:45.049612 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda12efaa8_e8bf_47eb_b019_d5e1fe136221.slice/crio-f984adc6479a39971e984169766d118d8bb0174cb0afc49e2055af66f1e9813c WatchSource:0}: Error finding container f984adc6479a39971e984169766d118d8bb0174cb0afc49e2055af66f1e9813c: Status 404 returned error can't find the container with id f984adc6479a39971e984169766d118d8bb0174cb0afc49e2055af66f1e9813c Feb 16 13:05:45 crc kubenswrapper[4816]: W0216 13:05:45.052692 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62b5f1bb_5470_48b7_8816_0fa127e5a4b8.slice/crio-ada015ddba0b3907418e6160e0281d5ce1fc4c23e229827fcf0571d3a2031a68 WatchSource:0}: Error finding container ada015ddba0b3907418e6160e0281d5ce1fc4c23e229827fcf0571d3a2031a68: Status 404 returned error can't find the container with id ada015ddba0b3907418e6160e0281d5ce1fc4c23e229827fcf0571d3a2031a68 Feb 16 13:05:45 crc kubenswrapper[4816]: W0216 13:05:45.055014 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d109fdb_3df5_4760_88d6_7e63ceb417e6.slice/crio-8d94230be7ec3cf33ea787a98bfc67c4b9e516ace809fc374040da49411760d3 WatchSource:0}: Error finding container 8d94230be7ec3cf33ea787a98bfc67c4b9e516ace809fc374040da49411760d3: Status 404 returned error can't find the container with id 8d94230be7ec3cf33ea787a98bfc67c4b9e516ace809fc374040da49411760d3 Feb 16 13:05:45 crc kubenswrapper[4816]: W0216 13:05:45.055792 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod963d9667_805f_49b3_a315_e61e0f6718bf.slice/crio-f096d613c49413b14eb00cfc9bc37f0e043bacb780236b4f422ec8c6101f1905 WatchSource:0}: Error finding container f096d613c49413b14eb00cfc9bc37f0e043bacb780236b4f422ec8c6101f1905: Status 404 returned error can't find the container with id f096d613c49413b14eb00cfc9bc37f0e043bacb780236b4f422ec8c6101f1905 Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.133071 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.133486 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.633470989 +0000 UTC m=+144.960184717 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.142404 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" event={"ID":"d06da9e4-b2e2-42f5-ae9f-67be3df9e0fb","Type":"ContainerStarted","Data":"83251b1410e5f50aa8ff2afa9415bf2182ffeded81fb7a8e70bb045e12de84f5"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.159824 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.163583 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.166259 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ltptv" event={"ID":"ac5acee1-9801-43d5-ab3e-16a00d6f98a2","Type":"ContainerStarted","Data":"75988d909451bf7a43fd2002acd05f0456813f86bd2aa24f22444e57b479703c"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.166560 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-p9dwz"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.171163 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" event={"ID":"4f6a7c47-55df-4e3a-b2dc-34eceff64b90","Type":"ContainerStarted","Data":"9d21aab38da6bf525008189407409603696e155e857775c5718fcf18b46878c8"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.172412 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" event={"ID":"963d9667-805f-49b3-a315-e61e0f6718bf","Type":"ContainerStarted","Data":"f096d613c49413b14eb00cfc9bc37f0e043bacb780236b4f422ec8c6101f1905"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.174131 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" event={"ID":"28235c00-42f1-4935-9b42-c055518c28d3","Type":"ContainerStarted","Data":"018fc3c3c0e5aa9ebdc4dbd779917a27d19a54e2bc0f774f4ff5aaf431e4d08c"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.176151 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" event={"ID":"ada6fd48-493b-41ec-aee5-dd526d65dcdb","Type":"ContainerStarted","Data":"7933f7b7922a8ed9bf6fa8ba2f13ad3d2aa0b6d60683cc0f9f81c57dc9c99fbd"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.177957 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" event={"ID":"6265fff8-9c3e-4f9c-84d6-4b304047d4b0","Type":"ContainerStarted","Data":"3f135563c304f2420e74de77f4abfc4b8d010d133447e1b339cf4ce97bc77351"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.178647 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-966kl" event={"ID":"9f11459f-3567-4ed0-8bf3-d55ca3507378","Type":"ContainerStarted","Data":"3d5e424ba7a68d97a32fb49c7f526601376795f9f808f6e0ead729b996475a89"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.180572 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" event={"ID":"32f87458-d8fa-446e-ab8e-2b349e1152ad","Type":"ContainerStarted","Data":"e064e5e00191aecb3f0904e294a0e2bac74bb9b1d365295c8bcbecc82438dbda"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.180629 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" event={"ID":"32f87458-d8fa-446e-ab8e-2b349e1152ad","Type":"ContainerStarted","Data":"c0f9007c3d3138da6e870e905a7845361892280f8671e66fc9d563f3642e4ff0"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.181981 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" event={"ID":"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9","Type":"ContainerStarted","Data":"11c4410fac2667e5bd82e797529f59e5601c84f7038b187dd3695a6be8060ef7"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.182412 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.184281 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" event={"ID":"f850653e-8cb2-44e3-9ef5-bbba590bbf9c","Type":"ContainerStarted","Data":"27bf0bb4c7a7d5a18b5cbba874da19c784ab3dcc1641b04a46a73967c041c409"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.186204 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-c6kz6" event={"ID":"0196136d-4ea7-4015-97d2-c885db51c66f","Type":"ContainerStarted","Data":"ac24b850b4861f57c36cdcbe36f9b3fef9661fa746fa400e599f157da9bd5fa0"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.186421 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-c6kz6" Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.188688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" event={"ID":"ef1e487f-08d8-4ba7-805a-68cf93ce434e","Type":"ContainerStarted","Data":"4abcde63b9045b222a4df69a9d7b7d2692b71c2e3da6574ad2198a44e2c6a7b7"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.189883 4816 patch_prober.go:28] interesting pod/downloads-7954f5f757-c6kz6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.189926 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c6kz6" podUID="0196136d-4ea7-4015-97d2-c885db51c66f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.192989 4816 generic.go:334] "Generic (PLEG): container finished" podID="4034cf38-9c2d-43ef-89fb-f4898f7ad8fb" 
containerID="ff7f338be0eaa5c467c28564ff261ea88d53f93246333d22be19d036dc516328" exitCode=0 Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.193058 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" event={"ID":"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb","Type":"ContainerDied","Data":"ff7f338be0eaa5c467c28564ff261ea88d53f93246333d22be19d036dc516328"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.197088 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-lwmcd" event={"ID":"3612ec61-6d09-4cf6-abc7-aa0258e232ea","Type":"ContainerStarted","Data":"9d9a0b1c33cf5265e51a2032ed204d45714b75148471c145efdcfc485e4760f1"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.197122 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-lwmcd" event={"ID":"3612ec61-6d09-4cf6-abc7-aa0258e232ea","Type":"ContainerStarted","Data":"dc6f41eb58a6e6c9a9155e888857084e8750be378e0702330ce8bb5e9453eb42"} Feb 16 13:05:45 crc kubenswrapper[4816]: W0216 13:05:45.198541 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode34f8aa9_54c5_4964_a481_ff6745ec54d8.slice/crio-b4be6e048c5e6247d95cb8ca377442563bf92ea757e478497bdf7446b71e56b2 WatchSource:0}: Error finding container b4be6e048c5e6247d95cb8ca377442563bf92ea757e478497bdf7446b71e56b2: Status 404 returned error can't find the container with id b4be6e048c5e6247d95cb8ca377442563bf92ea757e478497bdf7446b71e56b2 Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.201570 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" event={"ID":"a12efaa8-e8bf-47eb-b019-d5e1fe136221","Type":"ContainerStarted","Data":"f984adc6479a39971e984169766d118d8bb0174cb0afc49e2055af66f1e9813c"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.208902 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.214114 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f2dr7" event={"ID":"89f428fd-8717-4819-81d8-ee04443b38a5","Type":"ContainerStarted","Data":"dd56cd12c0e97ab9fb375b75c9c53cb8c97a39a6ab22187cb826702982c5d998"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.216141 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" event={"ID":"62b5f1bb-5470-48b7-8816-0fa127e5a4b8","Type":"ContainerStarted","Data":"ada015ddba0b3907418e6160e0281d5ce1fc4c23e229827fcf0571d3a2031a68"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.217556 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" event={"ID":"0d109fdb-3df5-4760-88d6-7e63ceb417e6","Type":"ContainerStarted","Data":"8d94230be7ec3cf33ea787a98bfc67c4b9e516ace809fc374040da49411760d3"} Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.218431 4816 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-jjn42 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.218484 4816 
prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.218585 4816 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9pl8s container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.218627 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.235252 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.235420 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.735379153 +0000 UTC m=+145.062092881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.236153 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.237453 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.73743949 +0000 UTC m=+145.064153218 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: W0216 13:05:45.244719 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75810b59_18d3_400b_ab38_25d3dcf7cea5.slice/crio-89a25526f7d83a8d22d7d69348cd95d7af7a2818697211f95eeaa515431898e8 WatchSource:0}: Error finding container 89a25526f7d83a8d22d7d69348cd95d7af7a2818697211f95eeaa515431898e8: Status 404 returned error can't find the container with id 89a25526f7d83a8d22d7d69348cd95d7af7a2818697211f95eeaa515431898e8 Feb 16 13:05:45 crc kubenswrapper[4816]: W0216 13:05:45.293171 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44e4d3ad_4a18_4b32_a575_79829e3cd784.slice/crio-4cee5975daf03bc79ba56bfc3b0a77f665afe40c80be3e613ae5f2d6725da03a WatchSource:0}: Error finding container 4cee5975daf03bc79ba56bfc3b0a77f665afe40c80be3e613ae5f2d6725da03a: Status 404 returned error can't find the container with id 4cee5975daf03bc79ba56bfc3b0a77f665afe40c80be3e613ae5f2d6725da03a Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.337592 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.337898 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.837870021 +0000 UTC m=+145.164583749 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.338153 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.340160 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-16 13:05:45.840146444 +0000 UTC m=+145.166860172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.395904 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.421756 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.421787 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.431020 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.434091 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bjtq8"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.437422 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.442235 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.442531 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.94250852 +0000 UTC m=+145.269222248 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.442614 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.442693 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg"] Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.443030 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:45.943018754 +0000 UTC m=+145.269732482 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.450426 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mcstk"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.543171 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.543443 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-c6kz6" podStartSLOduration=120.543424414 podStartE2EDuration="2m0.543424414s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:45.504306177 +0000 UTC m=+144.831019905" watchObservedRunningTime="2026-02-16 13:05:45.543424414 +0000 UTC m=+144.870138142" Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.543499 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.043486596 +0000 UTC m=+145.370200324 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.544018 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-ltptv" podStartSLOduration=5.54400777 podStartE2EDuration="5.54400777s" podCreationTimestamp="2026-02-16 13:05:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:45.543043804 +0000 UTC m=+144.869757532" watchObservedRunningTime="2026-02-16 13:05:45.54400777 +0000 UTC m=+144.870721498" Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.644954 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.645373 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.145357307 +0000 UTC m=+145.472071045 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.723087 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.728622 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-8gs8z"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.734842 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.739497 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.745350 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jhhtj"] Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.745924 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.746169 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.246139358 +0000 UTC m=+145.572853086 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.746256 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.776395 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p66fg"] Feb 16 13:05:45 crc kubenswrapper[4816]: E0216 13:05:45.781539 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.281526782 +0000 UTC m=+145.608240510 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:45 crc kubenswrapper[4816]: I0216 13:05:45.872269 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:45.873734 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.373709824 +0000 UTC m=+145.700423552 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.180319 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.181416 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.68118853 +0000 UTC m=+146.007902258 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.281616 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.282096 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.782051593 +0000 UTC m=+146.108765321 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.282339 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.282968 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.782953659 +0000 UTC m=+146.109667387 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: W0216 13:05:46.295468 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2badeb2_df5f_473e_b74a_5948ce933a4b.slice/crio-c9482fbc2188fca1822b78957756a4038ce631750c205ce2ef6c13fdea8c6b7b WatchSource:0}: Error finding container c9482fbc2188fca1822b78957756a4038ce631750c205ce2ef6c13fdea8c6b7b: Status 404 returned error can't find the container with id c9482fbc2188fca1822b78957756a4038ce631750c205ce2ef6c13fdea8c6b7b Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.317274 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" event={"ID":"4f6a7c47-55df-4e3a-b2dc-34eceff64b90","Type":"ContainerStarted","Data":"d7cfe3396c05b955f2cf8a1e71b11fbb97e04982d92697769ce2c28bd0a67fe6"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.327030 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f2dr7" event={"ID":"89f428fd-8717-4819-81d8-ee04443b38a5","Type":"ContainerStarted","Data":"2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.347146 4816 generic.go:334] "Generic (PLEG): container finished" podID="32f87458-d8fa-446e-ab8e-2b349e1152ad" containerID="e064e5e00191aecb3f0904e294a0e2bac74bb9b1d365295c8bcbecc82438dbda" exitCode=0 Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.347246 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" event={"ID":"32f87458-d8fa-446e-ab8e-2b349e1152ad","Type":"ContainerDied","Data":"e064e5e00191aecb3f0904e294a0e2bac74bb9b1d365295c8bcbecc82438dbda"} Feb 16 13:05:46 crc 
kubenswrapper[4816]: I0216 13:05:46.354279 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" event={"ID":"f850653e-8cb2-44e3-9ef5-bbba590bbf9c","Type":"ContainerStarted","Data":"986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.354439 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.357420 4816 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-brmt2 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" start-of-body= Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.357469 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" podUID="f850653e-8cb2-44e3-9ef5-bbba590bbf9c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.369794 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" event={"ID":"ada6fd48-493b-41ec-aee5-dd526d65dcdb","Type":"ContainerStarted","Data":"846140345b0a97f4157c512309896eed6069eda29b968681d0068a30fffd8f91"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.382906 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" event={"ID":"6265fff8-9c3e-4f9c-84d6-4b304047d4b0","Type":"ContainerStarted","Data":"0b1607d2aa7bc43b0605d3c16a7695f966f9ec7996e3dd60f33013d31eb5690b"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.383844 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.384129 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.88411428 +0000 UTC m=+146.210828008 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.386536 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" event={"ID":"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395","Type":"ContainerStarted","Data":"6050f5f47a257bb32eaf0480a15a1b54198c752d9b1f9b6cd167663f0fb10752"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.422029 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" event={"ID":"22166fdb-00e3-43e2-9a1d-d75446964277","Type":"ContainerStarted","Data":"840433c21dd65445ed8e2f061c47e3d97300c766bbdf0065ced994af5121b82d"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.431161 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" event={"ID":"c4d2516c-521d-4e48-a547-81c06a8cda3e","Type":"ContainerStarted","Data":"99258988ff9d8dac7b6d81486a340146299850deb0fc346ec71a73387ff580ee"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.436647 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" event={"ID":"4f0edb6a-4994-4385-b66a-7bf8fad44d60","Type":"ContainerStarted","Data":"2f2acec89ab992c36a5f80c16d302f91049cc72e59946587e604e04711e6394b"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.436744 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.438890 4816 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-4qmwv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.438934 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" podUID="4f0edb6a-4994-4385-b66a-7bf8fad44d60" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.450787 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" event={"ID":"436a9ea3-c8a5-4445-89bb-21133fe76d23","Type":"ContainerStarted","Data":"a59512240bf4f8af7a459d4d1f7d3e27e18f2961d7649293c113722d20869b28"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.455478 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" event={"ID":"75810b59-18d3-400b-ab38-25d3dcf7cea5","Type":"ContainerStarted","Data":"89a25526f7d83a8d22d7d69348cd95d7af7a2818697211f95eeaa515431898e8"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 
13:05:46.460014 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" event={"ID":"9f11459f-3567-4ed0-8bf3-d55ca3507378","Type":"ContainerStarted","Data":"5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.461078 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.467287 4816 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-966kl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.467349 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" podUID="9f11459f-3567-4ed0-8bf3-d55ca3507378" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.474262 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" event={"ID":"62b5f1bb-5470-48b7-8816-0fa127e5a4b8","Type":"ContainerStarted","Data":"a44473c47d6f4037321d4393fcac33f1f747a166f60feb3491821a90c9766d77"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.486994 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" event={"ID":"13129e04-c3d9-4387-bd36-b673b082d90e","Type":"ContainerStarted","Data":"f109e158c12d3901af39e03c5945f87a4ac214b76f0efbca5f7df8fd8bac3060"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.491794 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.493604 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:46.993586003 +0000 UTC m=+146.320299811 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.520966 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" event={"ID":"0c171de2-9344-4919-986f-e6544cb7cf0a","Type":"ContainerStarted","Data":"d671dda14d2a5395fcec54231bbe7eedc61e8e9445f2fe8816b896c613b0aa3e"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.522528 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" event={"ID":"e27acd41-ceb3-439f-b3f7-6ad547609158","Type":"ContainerStarted","Data":"008a6cf828e9de68191c8ddc18302044445ab2ec35edb99fe8214ba7429a7e4e"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.524173 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" event={"ID":"44e4d3ad-4a18-4b32-a575-79829e3cd784","Type":"ContainerStarted","Data":"4cee5975daf03bc79ba56bfc3b0a77f665afe40c80be3e613ae5f2d6725da03a"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.525748 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" podStartSLOduration=120.525736676 podStartE2EDuration="2m0.525736676s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.495423074 +0000 UTC m=+145.822136802" watchObservedRunningTime="2026-02-16 13:05:46.525736676 +0000 UTC m=+145.852450404" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.526050 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ff4c9" podStartSLOduration=121.526045095 podStartE2EDuration="2m1.526045095s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.524870602 +0000 UTC m=+145.851584330" watchObservedRunningTime="2026-02-16 13:05:46.526045095 +0000 UTC m=+145.852758823" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.539276 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" event={"ID":"e34f8aa9-54c5-4964-a481-ff6745ec54d8","Type":"ContainerStarted","Data":"b4be6e048c5e6247d95cb8ca377442563bf92ea757e478497bdf7446b71e56b2"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.552254 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" podStartSLOduration=120.552234663 podStartE2EDuration="2m0.552234663s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.54851877 +0000 UTC m=+145.875232498" 
watchObservedRunningTime="2026-02-16 13:05:46.552234663 +0000 UTC m=+145.878948381" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.566169 4816 generic.go:334] "Generic (PLEG): container finished" podID="a12efaa8-e8bf-47eb-b019-d5e1fe136221" containerID="553b3a99e876aa19e3e8598a9ffd16e26ad8b97055abd3d51527578bc8fc653f" exitCode=0 Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.566288 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" event={"ID":"a12efaa8-e8bf-47eb-b019-d5e1fe136221","Type":"ContainerDied","Data":"553b3a99e876aa19e3e8598a9ffd16e26ad8b97055abd3d51527578bc8fc653f"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.568507 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bjtq8" event={"ID":"7dc7b0ca-d149-48fa-acac-a7835087634b","Type":"ContainerStarted","Data":"783ee097c32a773ce03b4da12633e989798c13ae1d448225786bbb4aec54e352"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.593647 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.594637 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.0945909 +0000 UTC m=+146.421304638 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.595019 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.595572 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.095553187 +0000 UTC m=+146.422266915 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.631859 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" podStartSLOduration=121.631837675 podStartE2EDuration="2m1.631837675s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.629719967 +0000 UTC m=+145.956433695" watchObservedRunningTime="2026-02-16 13:05:46.631837675 +0000 UTC m=+145.958551413" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.634943 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" event={"ID":"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9","Type":"ContainerStarted","Data":"0c5b3245a1124113fb24021a02c229ab8c703f3a3ca08e244f116efb0dddf3ba"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.639386 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" event={"ID":"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb","Type":"ContainerStarted","Data":"0a3e7bed851a7c85cb65b8d900d7683d98524ff4bc26dd0a7d289c23dd95ce2f"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.644745 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-p9dwz" event={"ID":"96c042d0-6736-4ff8-ae21-d07442236cec","Type":"ContainerStarted","Data":"e2a41a731c8345b2809bbe49e5490f54b4e1b3df3bd9ab4c7fd9e5cdff45ba04"} Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.645568 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.645623 4816 patch_prober.go:28] interesting pod/downloads-7954f5f757-c6kz6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.645665 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c6kz6" podUID="0196136d-4ea7-4015-97d2-c885db51c66f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.652842 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-f2dr7" podStartSLOduration=121.652817759 podStartE2EDuration="2m1.652817759s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.650208166 +0000 UTC m=+145.976921894" watchObservedRunningTime="2026-02-16 13:05:46.652817759 +0000 UTC m=+145.979531487" Feb 16 13:05:46 crc 
kubenswrapper[4816]: I0216 13:05:46.655351 4816 patch_prober.go:28] interesting pod/console-operator-58897d9998-vrcl2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/readyz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.655402 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" podUID="ef1e487f-08d8-4ba7-805a-68cf93ce434e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/readyz\": dial tcp 10.217.0.25:8443: connect: connection refused" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.678862 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" podStartSLOduration=120.678847022 podStartE2EDuration="2m0.678847022s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.677041981 +0000 UTC m=+146.003755719" watchObservedRunningTime="2026-02-16 13:05:46.678847022 +0000 UTC m=+146.005560750" Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.697862 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.698020 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.198004074 +0000 UTC m=+146.524717802 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.698463 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.698998 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.198981001 +0000 UTC m=+146.525694729 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.713757 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-h72gw" podStartSLOduration=121.713717042 podStartE2EDuration="2m1.713717042s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.693279643 +0000 UTC m=+146.019993371" watchObservedRunningTime="2026-02-16 13:05:46.713717042 +0000 UTC m=+146.040430770"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.716276 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" podStartSLOduration=121.716267031 podStartE2EDuration="2m1.716267031s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.712217009 +0000 UTC m=+146.038930737" watchObservedRunningTime="2026-02-16 13:05:46.716267031 +0000 UTC m=+146.042980759"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.778995 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-lwmcd"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.789326 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 16 13:05:46 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld
Feb 16 13:05:46 crc kubenswrapper[4816]: [+]process-running ok
Feb 16 13:05:46 crc kubenswrapper[4816]: healthz check failed
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.789371 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.795418 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f58zp" podStartSLOduration=121.795404702 podStartE2EDuration="2m1.795404702s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.768958826 +0000 UTC m=+146.095672554" watchObservedRunningTime="2026-02-16 13:05:46.795404702 +0000 UTC m=+146.122118430"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.796796 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" podStartSLOduration=121.79678849 podStartE2EDuration="2m1.79678849s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.794734142 +0000 UTC m=+146.121447900" watchObservedRunningTime="2026-02-16 13:05:46.79678849 +0000 UTC m=+146.123502218"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.802648 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.804893 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.304851484 +0000 UTC m=+146.631565222 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.823524 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x8vcr" podStartSLOduration=121.823508693 podStartE2EDuration="2m1.823508693s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.822471163 +0000 UTC m=+146.149184891" watchObservedRunningTime="2026-02-16 13:05:46.823508693 +0000 UTC m=+146.150222411"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.868956 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" podStartSLOduration=121.868908435 podStartE2EDuration="2m1.868908435s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.857423665 +0000 UTC m=+146.184137393" watchObservedRunningTime="2026-02-16 13:05:46.868908435 +0000 UTC m=+146.195622163"
Feb 16 13:05:46 crc kubenswrapper[4816]: I0216 13:05:46.905518 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:46 crc kubenswrapper[4816]: E0216 13:05:46.905981 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.405960174 +0000 UTC m=+146.732673972 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.006631 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.007043 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.506990372 +0000 UTC m=+146.833704110 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.111202 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.111891 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.611861107 +0000 UTC m=+146.938574835 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.217274 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.217577 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.717543814 +0000 UTC m=+147.044257542 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.217915 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.218481 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.71846347 +0000 UTC m=+147.045177208 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.319374 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.319512 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.819492348 +0000 UTC m=+147.146206076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.319582 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.319926 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.81991858 +0000 UTC m=+147.146632308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.356951 4816 csr.go:261] certificate signing request csr-8d9h8 is approved, waiting to be issued
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.364585 4816 csr.go:257] certificate signing request csr-8d9h8 is issued
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.421179 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.421607 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:47.921583795 +0000 UTC m=+147.248297533 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.522260 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.522704 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.022681075 +0000 UTC m=+147.349394873 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.623772 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.623917 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.123894378 +0000 UTC m=+147.450608106 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.624011 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.624297 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.12428613 +0000 UTC m=+147.450999858 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.665958 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" event={"ID":"4034cf38-9c2d-43ef-89fb-f4898f7ad8fb","Type":"ContainerStarted","Data":"04bcd580f27cf035e4a9096e6b0933f94ba0535f399753f214b3e43652dc7e2d"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.670002 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" event={"ID":"22166fdb-00e3-43e2-9a1d-d75446964277","Type":"ContainerStarted","Data":"67aebd25fa1f63c8afe1672ab8aabf014d269b005929831f40bdbad7cb97fe38"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.670044 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" event={"ID":"22166fdb-00e3-43e2-9a1d-d75446964277","Type":"ContainerStarted","Data":"4eb393f2b2871670a73d8ad9e95f038af624c5ec0fdfa3662a25a44d06aa50cb"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.670418 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.672634 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" event={"ID":"44e4d3ad-4a18-4b32-a575-79829e3cd784","Type":"ContainerStarted","Data":"f0b69098f7521325eb603c71852e00b0c0ca14f2b26bc5236632075c038f2468"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.672743 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" event={"ID":"44e4d3ad-4a18-4b32-a575-79829e3cd784","Type":"ContainerStarted","Data":"95c68bc0043f390e17953b23ffb4098dee95863edf527b6b920831934957582c"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.680018 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-p9dwz" event={"ID":"96c042d0-6736-4ff8-ae21-d07442236cec","Type":"ContainerStarted","Data":"1bb61e8d242892cc6b363376b4cdf613da1e875466c32b68ed9425c52f27362b"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.680119 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-p9dwz" event={"ID":"96c042d0-6736-4ff8-ae21-d07442236cec","Type":"ContainerStarted","Data":"63d5d9d7ab980ed634d343da66b209ab210ffac5071f6fbb592754355e6ff3d9"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.680150 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-p9dwz"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.683688 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" podStartSLOduration=121.68367006 podStartE2EDuration="2m1.68367006s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.679840333 +0000 UTC m=+147.006554061" watchObservedRunningTime="2026-02-16 13:05:47.68367006 +0000 UTC m=+147.010383788"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.683986 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-lwmcd" podStartSLOduration=122.683980839 podStartE2EDuration="2m2.683980839s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:46.896325437 +0000 UTC m=+146.223039155" watchObservedRunningTime="2026-02-16 13:05:47.683980839 +0000 UTC m=+147.010694557"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.684356 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" event={"ID":"4f0edb6a-4994-4385-b66a-7bf8fad44d60","Type":"ContainerStarted","Data":"87dd9bdcb162ef905db26ef4c3d41b73ffd050cea6a8a14030a7df29acbba13b"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.685232 4816 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-4qmwv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body=
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.685383 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" podUID="4f0edb6a-4994-4385-b66a-7bf8fad44d60" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.688460 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" event={"ID":"f0e84263-7736-4b18-a955-7fadfb307294","Type":"ContainerStarted","Data":"231d41aa5eefde7810b2203d743e393ef31e703cc87f9860d0014658f5e16cea"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.688502 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" event={"ID":"f0e84263-7736-4b18-a955-7fadfb307294","Type":"ContainerStarted","Data":"1810e70a49448f0c2f2e1d1d4340f988c3c103199b18f091b8244bde0e86d155"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.688513 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" event={"ID":"f0e84263-7736-4b18-a955-7fadfb307294","Type":"ContainerStarted","Data":"0cf7bef911aeddc46726040a51c8d7241013adf1a7413e7e0c37a115bd9726fa"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.696311 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" event={"ID":"b2badeb2-df5f-473e-b74a-5948ce933a4b","Type":"ContainerStarted","Data":"da8abf9c4fb2e5302e580c45ba45d2057da98ab6e2ca74b494ea8cf23d34169a"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.696364 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" event={"ID":"b2badeb2-df5f-473e-b74a-5948ce933a4b","Type":"ContainerStarted","Data":"c9482fbc2188fca1822b78957756a4038ce631750c205ce2ef6c13fdea8c6b7b"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.697451 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tpb4x" podStartSLOduration=122.697433193 podStartE2EDuration="2m2.697433193s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.696605059 +0000 UTC m=+147.023318797" watchObservedRunningTime="2026-02-16 13:05:47.697433193 +0000 UTC m=+147.024146921"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.700732 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" event={"ID":"3fb528f3-2b50-4ba1-82dc-dfb3ed4e9395","Type":"ContainerStarted","Data":"037da33b427b19918f2150112047ee65f2511e6c6e9e9c8195893249949a40e3"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.703912 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" event={"ID":"4073598c-2443-4558-8eb3-d7a5b6a15c75","Type":"ContainerStarted","Data":"8a14a2b2f0963ea321acb388e6df6732153ddfe017c333b6074cf3b3e40378d3"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.704402 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" event={"ID":"4073598c-2443-4558-8eb3-d7a5b6a15c75","Type":"ContainerStarted","Data":"9a5949e0c43fb00c3d8a7c99ba83db63895b6d33b5e0170525648b6b1df7b4cd"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.714144 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" event={"ID":"62b5f1bb-5470-48b7-8816-0fa127e5a4b8","Type":"ContainerStarted","Data":"690891382e37f57e0158ed28fa6e42942f80f312ca7c3927fc1218691cf94652"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.714977 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" podStartSLOduration=121.71495716 podStartE2EDuration="2m1.71495716s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.714753894 +0000 UTC m=+147.041467632" watchObservedRunningTime="2026-02-16 13:05:47.71495716 +0000 UTC m=+147.041670888"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.719621 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" event={"ID":"49f3bfee-47a2-4347-872f-b4da6aeb68e5","Type":"ContainerStarted","Data":"a4da7958a01484188c38fa57f501b0f8bcd18e496da526a54c9699fbc7841e78"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.719724 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" event={"ID":"49f3bfee-47a2-4347-872f-b4da6aeb68e5","Type":"ContainerStarted","Data":"fefd6f73e2733f4abd6a5ed6629f7b0cd3c519bd6d2d2ecd96061a884334db2f"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.723406 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" event={"ID":"e27acd41-ceb3-439f-b3f7-6ad547609158","Type":"ContainerStarted","Data":"68a79159406fc34d91d5574544a443d13320da9533e7b75a4ba3c3ae98ee6c8b"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.724753 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.725086 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.225070411 +0000 UTC m=+147.551784139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.730130 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" event={"ID":"003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb","Type":"ContainerStarted","Data":"3fa5e5088e7414c60bdebee51a3abbb6bd5b4b80d9a7464ecf6b5f1eef1a6012"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.730847 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.733204 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" event={"ID":"32f87458-d8fa-446e-ab8e-2b349e1152ad","Type":"ContainerStarted","Data":"f63d93b144ddb3c019781ea620c7c35befbb6b240dac64b82072fe6ee83a68ee"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.733239 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.738081 4816 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-rn5jb container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body=
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.738165 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" podUID="003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.740049 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" event={"ID":"c4d2516c-521d-4e48-a547-81c06a8cda3e","Type":"ContainerStarted","Data":"d89b6ad67283bf8cabc54c325d8171672fdac807569928990c3b5b2869080dce"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.744164 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-844md" podStartSLOduration=122.74413333 podStartE2EDuration="2m2.74413333s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.738986778 +0000 UTC m=+147.065700516" watchObservedRunningTime="2026-02-16 13:05:47.74413333 +0000 UTC m=+147.070847058"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.747957 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t9xz7" event={"ID":"e4461b7c-5e7f-4ac9-bf37-2510584b4eb9","Type":"ContainerStarted","Data":"59bf89d7cede0ca51440140b61b6dd3d0abf0920de57f3b2fb5da0aefb2c644c"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.770547 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xj8mn" podStartSLOduration=122.770530644 podStartE2EDuration="2m2.770530644s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.769815965 +0000 UTC m=+147.096529703" watchObservedRunningTime="2026-02-16 13:05:47.770530644 +0000 UTC m=+147.097244372"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.775253 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" event={"ID":"ada6fd48-493b-41ec-aee5-dd526d65dcdb","Type":"ContainerStarted","Data":"2e9211175734d686d286311e916fc5ba7d2b0828bee716890e624f06127bc18a"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.778733 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 16 13:05:47 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld
Feb 16 13:05:47 crc kubenswrapper[4816]: [+]process-running ok
Feb 16 13:05:47 crc kubenswrapper[4816]: healthz check failed
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.778787 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.789996 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" event={"ID":"75810b59-18d3-400b-ab38-25d3dcf7cea5","Type":"ContainerStarted","Data":"8da8ca3f31d3558bbcee2bed6704b00392268bf85a75a34fd97607344788f4d0"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.790804 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-jhhtj" podStartSLOduration=122.790789347 podStartE2EDuration="2m2.790789347s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.789998515 +0000 UTC m=+147.116712243" watchObservedRunningTime="2026-02-16 13:05:47.790789347 +0000 UTC m=+147.117503075"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.791042 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.800039 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" event={"ID":"a12efaa8-e8bf-47eb-b019-d5e1fe136221","Type":"ContainerStarted","Data":"72740f7c857a1437808e8343a38cd8d2c3ca2b5cb1fdd139dcd3fb7dc13303c8"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.803639 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" event={"ID":"7ae3017b-8a59-4109-83c2-191b8f54831c","Type":"ContainerStarted","Data":"d58e633c7d1773dc7bbaf4ab57d149f58847cb764701b6429056a954cdd24260"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.803707 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" event={"ID":"7ae3017b-8a59-4109-83c2-191b8f54831c","Type":"ContainerStarted","Data":"4e396579527ab3561a666a7b0e6879e566bda9165e80d753e5431d65c3b1458e"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.806951 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" event={"ID":"28235c00-42f1-4935-9b42-c055518c28d3","Type":"ContainerStarted","Data":"1585654cbf5691f7c42a1f6f3fedbb693045fad24c5656677b178296aa6a541b"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.810703 4816 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-hpzh6 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" start-of-body=
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.810749 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" podUID="75810b59-18d3-400b-ab38-25d3dcf7cea5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.814418 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bksbg" podStartSLOduration=121.814391603 podStartE2EDuration="2m1.814391603s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.811560814 +0000 UTC m=+147.138274542" watchObservedRunningTime="2026-02-16 13:05:47.814391603 +0000 UTC m=+147.141105331"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.819187 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" event={"ID":"436a9ea3-c8a5-4445-89bb-21133fe76d23","Type":"ContainerStarted","Data":"096bce885bff445d36631186f627ca23f46d4829d194da8838da32b55dc0489f"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.826928 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.827257 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.32724196 +0000 UTC m=+147.653955688 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.844972 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-p9dwz" podStartSLOduration=7.844936262 podStartE2EDuration="7.844936262s" podCreationTimestamp="2026-02-16 13:05:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.835422278 +0000 UTC m=+147.162136026" watchObservedRunningTime="2026-02-16 13:05:47.844936262 +0000 UTC m=+147.171649990"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.852696 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bjtq8" event={"ID":"7dc7b0ca-d149-48fa-acac-a7835087634b","Type":"ContainerStarted","Data":"925dc30a1a42079eb008f2a53db9800d6e2e5cd86941c1bf86aacbf92503a70e"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.861244 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ljlg" podStartSLOduration=122.861223175 podStartE2EDuration="2m2.861223175s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.860522476 +0000 UTC m=+147.187236204" watchObservedRunningTime="2026-02-16 13:05:47.861223175 +0000 UTC m=+147.187936903"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.868139 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" event={"ID":"7be610ca-65bb-4abc-809f-37a005b6f491","Type":"ContainerStarted","Data":"ddaf841efd7f6f57696741a315f7912bad971f8b88a4fd29454ffc0e4a70141f"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.894688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" event={"ID":"e34f8aa9-54c5-4964-a481-ff6745ec54d8","Type":"ContainerStarted","Data":"14530c301f50b9ac1dac510e1d92fcd118b840d004c315d661a4f261c670ab7f"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.905171 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-l9v8l" podStartSLOduration=122.905156726 podStartE2EDuration="2m2.905156726s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.904018204 +0000 UTC m=+147.230731932" watchObservedRunningTime="2026-02-16 13:05:47.905156726 +0000 UTC m=+147.231870454"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.915610 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" event={"ID":"963d9667-805f-49b3-a315-e61e0f6718bf","Type":"ContainerStarted","Data":"f40c0528c7acccff6145a3a2d697df512dd60e3f91219042ef539789832ccf6a"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.915679 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" event={"ID":"963d9667-805f-49b3-a315-e61e0f6718bf","Type":"ContainerStarted","Data":"a03cfa3ebba8c94b9b2805b29023aadb8bcf9a3f9c3bb2bea9296b37603497ef"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.928803 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:47 crc kubenswrapper[4816]: E0216 13:05:47.930440 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.430420238 +0000 UTC m=+147.757133976 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.933444 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" event={"ID":"13129e04-c3d9-4387-bd36-b673b082d90e","Type":"ContainerStarted","Data":"53c5aaebcd3808534c578b4378cb803223a27bcd156143c9538b9ba16ef6c11d"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.935943 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-kmpxb" podStartSLOduration=121.935921471 podStartE2EDuration="2m1.935921471s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.928157905 +0000 UTC m=+147.254871633" watchObservedRunningTime="2026-02-16 13:05:47.935921471 +0000 UTC m=+147.262635189"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.947573 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcstk" podStartSLOduration=121.947556234 podStartE2EDuration="2m1.947556234s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.945781485 +0000 UTC m=+147.272495213" watchObservedRunningTime="2026-02-16 13:05:47.947556234 +0000 UTC m=+147.274269962"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.950840 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" event={"ID":"0d109fdb-3df5-4760-88d6-7e63ceb417e6","Type":"ContainerStarted","Data":"c817b6ea662ff317e63c0df42220cea4d69fdae65cccb5c0f0e298951d4911a9"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.950868 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" event={"ID":"0d109fdb-3df5-4760-88d6-7e63ceb417e6","Type":"ContainerStarted","Data":"5133d5b977501c2333bbc8e76c84c82b432d1528632c0a5598f0694dd6e1bc73"}
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.952101 4816 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-966kl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body=
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.952131 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" podUID="9f11459f-3567-4ed0-8bf3-d55ca3507378" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused"
Feb 16 13:05:47 crc kubenswrapper[4816]: I0216 13:05:47.983747 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8g477" podStartSLOduration=122.98373334 podStartE2EDuration="2m2.98373334s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:47.981610131 +0000 UTC m=+147.308323859" watchObservedRunningTime="2026-02-16 13:05:47.98373334 +0000 UTC m=+147.310447068"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.030353 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.032344 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.53233215 +0000 UTC m=+147.859045878 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.032575 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pstjh" podStartSLOduration=123.032559057 podStartE2EDuration="2m3.032559057s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.013396294 +0000 UTC m=+147.340110022" watchObservedRunningTime="2026-02-16 13:05:48.032559057 +0000 UTC m=+147.359272785"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.033509 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-bjtq8" podStartSLOduration=8.033505434 podStartE2EDuration="8.033505434s" podCreationTimestamp="2026-02-16 13:05:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.032873065 +0000 UTC m=+147.359586793" watchObservedRunningTime="2026-02-16 13:05:48.033505434 +0000 UTC m=+147.360219162"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.055325 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-8gs8z" podStartSLOduration=122.055305779 podStartE2EDuration="2m2.055305779s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.054738573 +0000 UTC m=+147.381452311" watchObservedRunningTime="2026-02-16 13:05:48.055305779 +0000 UTC m=+147.382019507"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.066799 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.066851 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.109509 4816 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-vcm42 container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.10:8443/livez\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.109648 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" podUID="4034cf38-9c2d-43ef-89fb-f4898f7ad8fb" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.10:8443/livez\": dial tcp 10.217.0.10:8443: connect: connection refused"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.144243 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.144958 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9mmht" podStartSLOduration=122.14492995 podStartE2EDuration="2m2.14492995s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.09312554 +0000 UTC m=+147.419839268" watchObservedRunningTime="2026-02-16 13:05:48.14492995 +0000 UTC m=+147.471643678"
Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.145384 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.645348852 +0000 UTC m=+147.972062590 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.146004 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" podStartSLOduration=122.1459983 podStartE2EDuration="2m2.1459983s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.142727379 +0000 UTC m=+147.469441107" watchObservedRunningTime="2026-02-16 13:05:48.1459983 +0000 UTC m=+147.472712038"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.146754 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp"
Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.149422 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.649407625 +0000 UTC m=+147.976121353 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.174801 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" podStartSLOduration=123.17477276 podStartE2EDuration="2m3.17477276s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.168503786 +0000 UTC m=+147.495217514" watchObservedRunningTime="2026-02-16 13:05:48.17477276 +0000 UTC m=+147.501486508"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.192261 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vgdfk" podStartSLOduration=122.192232405 podStartE2EDuration="2m2.192232405s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.190376114 +0000 UTC m=+147.517089852" watchObservedRunningTime="2026-02-16 13:05:48.192232405 +0000 UTC m=+147.518946133"
Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.248533 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.248912 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.74889067 +0000 UTC m=+148.075604398 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.350312 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.350978 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.850945736 +0000 UTC m=+148.177659634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.358191 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-rtjrp" podStartSLOduration=122.358164127 podStartE2EDuration="2m2.358164127s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.356039838 +0000 UTC m=+147.682753566" watchObservedRunningTime="2026-02-16 13:05:48.358164127 +0000 UTC m=+147.684877855" Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.358625 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bcsnm" podStartSLOduration=122.35861844 podStartE2EDuration="2m2.35861844s" podCreationTimestamp="2026-02-16 13:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:48.25753328 +0000 UTC m=+147.584247008" watchObservedRunningTime="2026-02-16 13:05:48.35861844 +0000 UTC m=+147.685332168" Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.359506 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-vrcl2" Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.365616 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-16 13:00:47 +0000 UTC, rotation deadline is 2026-11-19 07:43:54.697185241 +0000 UTC Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.365691 4816 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6618h38m6.331497975s for next certificate rotation Feb 
16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.451036 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.451286 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:48.951272425 +0000 UTC m=+148.277986153 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.518523 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.551918 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.552495 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.052470977 +0000 UTC m=+148.379184705 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.653691 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.653958 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.153916007 +0000 UTC m=+148.480629735 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.654223 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.654777 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.15476581 +0000 UTC m=+148.481479538 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.756044 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.756317 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.256286493 +0000 UTC m=+148.583000221 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.756551 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.756987 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.256976051 +0000 UTC m=+148.583689809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.777937 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:48 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:48 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:48 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.778007 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.857633 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.857810 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.357784853 +0000 UTC m=+148.684498581 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.858021 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.858327 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.358319168 +0000 UTC m=+148.685032896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.962257 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:48 crc kubenswrapper[4816]: E0216 13:05:48.962783 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.462763521 +0000 UTC m=+148.789477249 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.977953 4816 generic.go:334] "Generic (PLEG): container finished" podID="e34f8aa9-54c5-4964-a481-ff6745ec54d8" containerID="14530c301f50b9ac1dac510e1d92fcd118b840d004c315d661a4f261c670ab7f" exitCode=0 Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.978048 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" event={"ID":"e34f8aa9-54c5-4964-a481-ff6745ec54d8","Type":"ContainerDied","Data":"14530c301f50b9ac1dac510e1d92fcd118b840d004c315d661a4f261c670ab7f"} Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.985183 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" event={"ID":"a12efaa8-e8bf-47eb-b019-d5e1fe136221","Type":"ContainerStarted","Data":"0e1a1e8db865926bdedee4e03c3b2d0097ef69c751598e03c3d4535db38c1012"} Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.986956 4816 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-4qmwv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.986999 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" podUID="4f0edb6a-4994-4385-b66a-7bf8fad44d60" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.994111 4816 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-rn5jb container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Feb 16 13:05:48 crc kubenswrapper[4816]: I0216 13:05:48.994268 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" podUID="003ad0c0-6b1e-4c4e-9eca-bec5263fd3bb" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.055894 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" podStartSLOduration=124.055873519 podStartE2EDuration="2m4.055873519s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:49.054211913 +0000 UTC m=+148.380925781" watchObservedRunningTime="2026-02-16 13:05:49.055873519 +0000 UTC m=+148.382587247" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.064549 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.074320 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.574301181 +0000 UTC m=+148.901014909 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.167638 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.167827 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.667796 +0000 UTC m=+148.994509728 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.167994 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.168280 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.668258773 +0000 UTC m=+148.994972501 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.268837 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.269088 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.269130 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.269166 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.269202 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.269803 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.769772334 +0000 UTC m=+149.096486082 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.270428 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.275922 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.280593 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.290387 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.370087 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.370572 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.870552655 +0000 UTC m=+149.197266433 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.440058 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.454371 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.462627 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.471414 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.471670 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.971622084 +0000 UTC m=+149.298335812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.471723 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.472284 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:49.972268212 +0000 UTC m=+149.298982000 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.572456 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.572578 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.072552259 +0000 UTC m=+149.399265987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.572803 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.573151 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.073137435 +0000 UTC m=+149.399851153 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.674365 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.674590 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.174546314 +0000 UTC m=+149.501260042 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.674949 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.675251 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.175238974 +0000 UTC m=+149.501952702 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.785314 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.785704 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.285685293 +0000 UTC m=+149.612399021 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.787300 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:49 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:49 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:49 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.787340 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.888677 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.889420 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.389404686 +0000 UTC m=+149.716118424 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.990223 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:49 crc kubenswrapper[4816]: E0216 13:05:49.990493 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.490478015 +0000 UTC m=+149.817191743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.991534 4816 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-hpzh6 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 16 13:05:49 crc kubenswrapper[4816]: I0216 13:05:49.991589 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" podUID="75810b59-18d3-400b-ab38-25d3dcf7cea5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 16 13:05:50 crc kubenswrapper[4816]: W0216 13:05:50.062304 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-f69ed999636624ce70e54e91749786d8d00c3cc8f5d4ea92fbf7051983516a28 WatchSource:0}: Error finding container f69ed999636624ce70e54e91749786d8d00c3cc8f5d4ea92fbf7051983516a28: Status 404 returned error can't find the container with id f69ed999636624ce70e54e91749786d8d00c3cc8f5d4ea92fbf7051983516a28 Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.092394 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 
13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.097503 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.597483989 +0000 UTC m=+149.924197787 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.193992 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.194579 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.694563137 +0000 UTC m=+150.021276865 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.295281 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.295638 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.795627036 +0000 UTC m=+150.122340764 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.396021 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.396579 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.896562922 +0000 UTC m=+150.223276650 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.497289 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.497614 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:50.99759963 +0000 UTC m=+150.324313358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.564158 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.598192 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e34f8aa9-54c5-4964-a481-ff6745ec54d8-secret-volume\") pod \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.598278 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e34f8aa9-54c5-4964-a481-ff6745ec54d8-config-volume\") pod \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.598454 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.598490 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsqrt\" (UniqueName: \"kubernetes.io/projected/e34f8aa9-54c5-4964-a481-ff6745ec54d8-kube-api-access-gsqrt\") pod \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\" (UID: \"e34f8aa9-54c5-4964-a481-ff6745ec54d8\") " Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.600681 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e34f8aa9-54c5-4964-a481-ff6745ec54d8-config-volume" (OuterVolumeSpecName: "config-volume") pod "e34f8aa9-54c5-4964-a481-ff6745ec54d8" (UID: "e34f8aa9-54c5-4964-a481-ff6745ec54d8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.601396 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.101362934 +0000 UTC m=+150.428076682 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.608577 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e34f8aa9-54c5-4964-a481-ff6745ec54d8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e34f8aa9-54c5-4964-a481-ff6745ec54d8" (UID: "e34f8aa9-54c5-4964-a481-ff6745ec54d8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.608891 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e34f8aa9-54c5-4964-a481-ff6745ec54d8-kube-api-access-gsqrt" (OuterVolumeSpecName: "kube-api-access-gsqrt") pod "e34f8aa9-54c5-4964-a481-ff6745ec54d8" (UID: "e34f8aa9-54c5-4964-a481-ff6745ec54d8"). InnerVolumeSpecName "kube-api-access-gsqrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.660859 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hpzh6" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.699482 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.699576 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsqrt\" (UniqueName: \"kubernetes.io/projected/e34f8aa9-54c5-4964-a481-ff6745ec54d8-kube-api-access-gsqrt\") on node \"crc\" DevicePath \"\"" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.699588 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e34f8aa9-54c5-4964-a481-ff6745ec54d8-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.699598 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e34f8aa9-54c5-4964-a481-ff6745ec54d8-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.699874 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.199851741 +0000 UTC m=+150.526565469 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.780600 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:50 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:50 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:50 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.780670 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.800318 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.800502 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.300475508 +0000 UTC m=+150.627189246 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.800708 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.801033 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.301023774 +0000 UTC m=+150.627737492 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.901903 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.902095 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.402063422 +0000 UTC m=+150.728777150 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:50 crc kubenswrapper[4816]: I0216 13:05:50.902189 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:50 crc kubenswrapper[4816]: E0216 13:05:50.902502 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.402495024 +0000 UTC m=+150.729208752 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.003222 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.003377 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.503355507 +0000 UTC m=+150.830069235 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.003414 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.003785 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.503777349 +0000 UTC m=+150.830491077 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.007191 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f9a061ddf88892f475f9405e43602d5748b1e71995cf6979351657f1e4a6a9bd"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.007228 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f69ed999636624ce70e54e91749786d8d00c3cc8f5d4ea92fbf7051983516a28"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.010232 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2eb09020fba31b48ed2598f43e42b4be3192406f614dbee771dae4327c6f7d27"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.010273 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"87daa5f71e8090d369eebec12d68d8c7d51e7ac4db8c6b880ea7c2d02f3e6766"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.012423 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"40730bb718abba9d5223af98a882a7914f11e8355146a949115da6bbd2462ff0"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.012471 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4783bd860ab6cf3d3dc5efde8719f526833b73967ae9fcc8efda261bfc670236"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.012887 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.014474 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" event={"ID":"7be610ca-65bb-4abc-809f-37a005b6f491","Type":"ContainerStarted","Data":"35213a85659456730318021ce5e5c7bf3afcacb99d30dd802dc3440841ee0582"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.016163 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.020785 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88" event={"ID":"e34f8aa9-54c5-4964-a481-ff6745ec54d8","Type":"ContainerDied","Data":"b4be6e048c5e6247d95cb8ca377442563bf92ea757e478497bdf7446b71e56b2"} Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.020907 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4be6e048c5e6247d95cb8ca377442563bf92ea757e478497bdf7446b71e56b2" Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.107101 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.108312 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.608290903 +0000 UTC m=+150.935004641 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.210348 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.210842 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.710826134 +0000 UTC m=+151.037539862 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.310971 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.311123 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.811099661 +0000 UTC m=+151.137813389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.311535 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.311836 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.811824741 +0000 UTC m=+151.138538549 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.352213 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.412229 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.413010 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:51.912981002 +0000 UTC m=+151.239694800 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.513310 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.513619 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.013605199 +0000 UTC m=+151.340318927 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.615397 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.615710 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.115695096 +0000 UTC m=+151.442408824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.716879 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.717276 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.21725996 +0000 UTC m=+151.543973688 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.778698 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:51 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:51 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:51 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.778762 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.817839 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.818029 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.31799829 +0000 UTC m=+151.644712018 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.818080 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.818483 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.318466392 +0000 UTC m=+151.645180160 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.918984 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.919214 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.419188862 +0000 UTC m=+151.745902590 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:51 crc kubenswrapper[4816]: I0216 13:05:51.919315 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:51 crc kubenswrapper[4816]: E0216 13:05:51.919626 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.419612814 +0000 UTC m=+151.746326582 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.020873 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.021078 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.521046023 +0000 UTC m=+151.847759751 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.021468 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.021809 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.521794863 +0000 UTC m=+151.848508591 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.029065 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" event={"ID":"7be610ca-65bb-4abc-809f-37a005b6f491","Type":"ContainerStarted","Data":"046be6476931a2631952abcc9ded9f7588a0179360698b11d432fed5b261f7c0"} Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.029105 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" event={"ID":"7be610ca-65bb-4abc-809f-37a005b6f491","Type":"ContainerStarted","Data":"461a23885e55a0f56285e7b2c7d4b0c80d2b5d92b3eb3c6998aa8f786f24d894"} Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.123172 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.123726 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.623708976 +0000 UTC m=+151.950422704 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.214612 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6f4r6"] Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.214854 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e34f8aa9-54c5-4964-a481-ff6745ec54d8" containerName="collect-profiles" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.214869 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e34f8aa9-54c5-4964-a481-ff6745ec54d8" containerName="collect-profiles" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.215000 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e34f8aa9-54c5-4964-a481-ff6745ec54d8" containerName="collect-profiles" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.215821 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.218792 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.225246 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.225626 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.725609738 +0000 UTC m=+152.052323466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.291354 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6f4r6"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.326033 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.326361 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.826338938 +0000 UTC m=+152.153052666 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.326571 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xrvr\" (UniqueName: \"kubernetes.io/projected/7550d81a-724b-4009-80d0-fa650ea35fa8-kube-api-access-6xrvr\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.326688 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-catalog-content\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.326765 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-utilities\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.326891 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.327416 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.827401957 +0000 UTC m=+152.154115685 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.409205 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-89gd5"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.414008 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.422084 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-2wpnx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.423425 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.427897 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.428141 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xrvr\" (UniqueName: \"kubernetes.io/projected/7550d81a-724b-4009-80d0-fa650ea35fa8-kube-api-access-6xrvr\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.428186 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-catalog-content\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.428218 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-utilities\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.428585 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-utilities\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.428669 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:52.928640932 +0000 UTC m=+152.255354660 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.429170 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-catalog-content\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.436291 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-89gd5"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.467355 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xrvr\" (UniqueName: \"kubernetes.io/projected/7550d81a-724b-4009-80d0-fa650ea35fa8-kube-api-access-6xrvr\") pod \"certified-operators-6f4r6\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.469957 4816 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.529618 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.529709 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htr6p\" (UniqueName: \"kubernetes.io/projected/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-kube-api-access-htr6p\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.529732 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-utilities\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.529798 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-catalog-content\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.530062 4816 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:53.03004554 +0000 UTC m=+152.356759268 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.530303 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.587883 4816 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-16T13:05:52.4699925Z","Handler":null,"Name":""} Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.604806 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-n6nmx"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.605746 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.626238 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n6nmx"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631023 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.631216 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-16 13:05:53.131183671 +0000 UTC m=+152.457897399 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631475 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-utilities\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631555 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-utilities\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631595 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-catalog-content\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631620 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-catalog-content\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631694 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631725 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b9q8\" (UniqueName: \"kubernetes.io/projected/504db8d2-a7d7-4484-9426-6426503b9df3-kube-api-access-4b9q8\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.631820 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htr6p\" (UniqueName: \"kubernetes.io/projected/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-kube-api-access-htr6p\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.632003 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-catalog-content\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: E0216 13:05:52.632067 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-16 13:05:53.132047465 +0000 UTC m=+152.458761393 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qjdjp" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.632243 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-utilities\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.654517 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htr6p\" (UniqueName: \"kubernetes.io/projected/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-kube-api-access-htr6p\") pod \"community-operators-89gd5\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.654805 4816 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.654839 4816 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.683827 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.684473 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.688712 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.691387 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.704130 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.733587 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.734755 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b102838-955a-4fb0-8afe-f0a0e9582e67-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.734801 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-utilities\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.734838 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-catalog-content\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.734871 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9b102838-955a-4fb0-8afe-f0a0e9582e67-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.734908 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b9q8\" (UniqueName: \"kubernetes.io/projected/504db8d2-a7d7-4484-9426-6426503b9df3-kube-api-access-4b9q8\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.735763 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.736818 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-utilities\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.737093 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-catalog-content\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.773069 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.776755 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b9q8\" (UniqueName: \"kubernetes.io/projected/504db8d2-a7d7-4484-9426-6426503b9df3-kube-api-access-4b9q8\") pod \"certified-operators-n6nmx\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") " pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.787260 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:52 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:52 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:52 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.787334 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.825045 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nsvtk"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.826072 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.836300 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9b102838-955a-4fb0-8afe-f0a0e9582e67-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.836368 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.836457 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b102838-955a-4fb0-8afe-f0a0e9582e67-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.836879 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9b102838-955a-4fb0-8afe-f0a0e9582e67-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.839552 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsvtk"] Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.840902 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.840939 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.866475 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b102838-955a-4fb0-8afe-f0a0e9582e67-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.888196 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qjdjp\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.919323 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.938358 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-utilities\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.938417 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-catalog-content\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.938445 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdxk6\" (UniqueName: \"kubernetes.io/projected/793c1b16-a031-4e64-8874-03cf983d16b5-kube-api-access-cdxk6\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.996980 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:05:52 crc kubenswrapper[4816]: I0216 13:05:52.997995 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.031144 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.040127 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-utilities\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.040168 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-catalog-content\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.040215 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdxk6\" (UniqueName: \"kubernetes.io/projected/793c1b16-a031-4e64-8874-03cf983d16b5-kube-api-access-cdxk6\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.040967 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-utilities\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.041191 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-catalog-content\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.056198 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.057138 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" event={"ID":"7be610ca-65bb-4abc-809f-37a005b6f491","Type":"ContainerStarted","Data":"46d12fc11e1b4acd461d45ea9532b3f7c4bcd08821d93f8374cf9f2a755e5982"} Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.063523 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdxk6\" (UniqueName: \"kubernetes.io/projected/793c1b16-a031-4e64-8874-03cf983d16b5-kube-api-access-cdxk6\") pod \"community-operators-nsvtk\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") " pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.079822 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-89gd5"] Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.088892 4816 patch_prober.go:28] interesting pod/downloads-7954f5f757-c6kz6 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.089283 4816 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-c6kz6" podUID="0196136d-4ea7-4015-97d2-c885db51c66f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.097871 4816 patch_prober.go:28] interesting pod/downloads-7954f5f757-c6kz6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.097935 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c6kz6" podUID="0196136d-4ea7-4015-97d2-c885db51c66f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.112128 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.130201 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vcm42" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.133419 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6f4r6"] Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.151893 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-p66fg" podStartSLOduration=13.151867423 podStartE2EDuration="13.151867423s" podCreationTimestamp="2026-02-16 13:05:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:53.149171998 +0000 UTC m=+152.475885726" watchObservedRunningTime="2026-02-16 13:05:53.151867423 +0000 UTC m=+152.478581151" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.168921 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.251810 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.252167 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:05:53 crc kubenswrapper[4816]: W0216 13:05:53.276165 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7550d81a_724b_4009_80d0_fa650ea35fa8.slice/crio-4a03afba67733c4bce78655e806378b333a5be62c9cc9829f73a81dd57c0caf2 WatchSource:0}: Error finding container 4a03afba67733c4bce78655e806378b333a5be62c9cc9829f73a81dd57c0caf2: Status 404 returned error can't find the container with id 4a03afba67733c4bce78655e806378b333a5be62c9cc9829f73a81dd57c0caf2 Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.276499 4816 patch_prober.go:28] interesting pod/console-f9d7485db-f2dr7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.276571 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-f2dr7" podUID="89f428fd-8717-4819-81d8-ee04443b38a5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Feb 16 13:05:53 crc kubenswrapper[4816]: W0216 13:05:53.431141 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod504db8d2_a7d7_4484_9426_6426503b9df3.slice/crio-47e13549ee89a563457a7ba3f611a8d4e44ca26a9ee5d27fac35308b46458a11 WatchSource:0}: Error finding container 47e13549ee89a563457a7ba3f611a8d4e44ca26a9ee5d27fac35308b46458a11: Status 404 returned error can't find the container with id 47e13549ee89a563457a7ba3f611a8d4e44ca26a9ee5d27fac35308b46458a11 Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.455031 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.458904 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n6nmx"] Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.525443 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.536956 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.537426 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.546698 4816 patch_prober.go:28] interesting pod/apiserver-76f77b778f-l7q8r container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]log ok Feb 16 13:05:53 
crc kubenswrapper[4816]: [+]etcd ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/start-apiserver-admission-initializer ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/generic-apiserver-start-informers ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/max-in-flight-filter ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/storage-object-count-tracker-hook ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/image.openshift.io-apiserver-caches ok Feb 16 13:05:53 crc kubenswrapper[4816]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Feb 16 13:05:53 crc kubenswrapper[4816]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/project.openshift.io-projectcache ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/openshift.io-startinformers ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/openshift.io-restmapperupdater ok Feb 16 13:05:53 crc kubenswrapper[4816]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Feb 16 13:05:53 crc kubenswrapper[4816]: livez check failed Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.546751 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" podUID="a12efaa8-e8bf-47eb-b019-d5e1fe136221" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.577964 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.597060 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qjdjp"] Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.616366 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4qmwv" Feb 16 13:05:53 crc kubenswrapper[4816]: W0216 13:05:53.621004 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39dc10dd_2280_470a_b50e_272b7d1b705f.slice/crio-c7b8d9068f89c94b61cda7cc6b2d6a3baa845df4305df34f975da9fdebd5ce77 WatchSource:0}: Error finding container c7b8d9068f89c94b61cda7cc6b2d6a3baa845df4305df34f975da9fdebd5ce77: Status 404 returned error can't find the container with id c7b8d9068f89c94b61cda7cc6b2d6a3baa845df4305df34f975da9fdebd5ce77 Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.778505 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-lwmcd" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.781549 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:53 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:53 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:53 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.781643 4816 prober.go:107] 
"Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.825371 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rn5jb" Feb 16 13:05:53 crc kubenswrapper[4816]: I0216 13:05:53.915603 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsvtk"] Feb 16 13:05:53 crc kubenswrapper[4816]: W0216 13:05:53.929710 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod793c1b16_a031_4e64_8874_03cf983d16b5.slice/crio-977ff384ed4d5e33c4bd8ec00a100343ef500de755ac38a42bc43833b08f95c2 WatchSource:0}: Error finding container 977ff384ed4d5e33c4bd8ec00a100343ef500de755ac38a42bc43833b08f95c2: Status 404 returned error can't find the container with id 977ff384ed4d5e33c4bd8ec00a100343ef500de755ac38a42bc43833b08f95c2 Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.063268 4816 generic.go:334] "Generic (PLEG): container finished" podID="504db8d2-a7d7-4484-9426-6426503b9df3" containerID="fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a" exitCode=0 Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.063522 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n6nmx" event={"ID":"504db8d2-a7d7-4484-9426-6426503b9df3","Type":"ContainerDied","Data":"fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.063549 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n6nmx" event={"ID":"504db8d2-a7d7-4484-9426-6426503b9df3","Type":"ContainerStarted","Data":"47e13549ee89a563457a7ba3f611a8d4e44ca26a9ee5d27fac35308b46458a11"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.065499 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.065617 4816 generic.go:334] "Generic (PLEG): container finished" podID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerID="1e63127f510f65da1339df47f896c5bb63360a49aa9834f1f0dfb55196b16908" exitCode=0 Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.065680 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f4r6" event={"ID":"7550d81a-724b-4009-80d0-fa650ea35fa8","Type":"ContainerDied","Data":"1e63127f510f65da1339df47f896c5bb63360a49aa9834f1f0dfb55196b16908"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.065705 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f4r6" event={"ID":"7550d81a-724b-4009-80d0-fa650ea35fa8","Type":"ContainerStarted","Data":"4a03afba67733c4bce78655e806378b333a5be62c9cc9829f73a81dd57c0caf2"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.067292 4816 generic.go:334] "Generic (PLEG): container finished" podID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerID="717237571a7679e9e9a4ad4b433c7f01993d1a3c7983e750ac7e5bbe645e2af0" exitCode=0 Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.067337 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89gd5" 
event={"ID":"19812a43-b6b0-45dc-9c35-b3c787fd4ff1","Type":"ContainerDied","Data":"717237571a7679e9e9a4ad4b433c7f01993d1a3c7983e750ac7e5bbe645e2af0"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.067352 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89gd5" event={"ID":"19812a43-b6b0-45dc-9c35-b3c787fd4ff1","Type":"ContainerStarted","Data":"d41b6a4896160aae033f8fbd176c680759b56fce70e2b6bafd41628051ddbd9a"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.069397 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" event={"ID":"39dc10dd-2280-470a-b50e-272b7d1b705f","Type":"ContainerStarted","Data":"63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.069458 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" event={"ID":"39dc10dd-2280-470a-b50e-272b7d1b705f","Type":"ContainerStarted","Data":"c7b8d9068f89c94b61cda7cc6b2d6a3baa845df4305df34f975da9fdebd5ce77"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.069547 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.071595 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9b102838-955a-4fb0-8afe-f0a0e9582e67","Type":"ContainerStarted","Data":"ef8631c451bb904eed872546899c9ab83940b07aa1306dea058c738876b32570"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.071620 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9b102838-955a-4fb0-8afe-f0a0e9582e67","Type":"ContainerStarted","Data":"1ddd7fc431e3c77a30724ed8629dc28051d0d7386550d3e58cb981c752d91c91"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.074419 4816 generic.go:334] "Generic (PLEG): container finished" podID="793c1b16-a031-4e64-8874-03cf983d16b5" containerID="de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756" exitCode=0 Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.074598 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsvtk" event={"ID":"793c1b16-a031-4e64-8874-03cf983d16b5","Type":"ContainerDied","Data":"de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.074640 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsvtk" event={"ID":"793c1b16-a031-4e64-8874-03cf983d16b5","Type":"ContainerStarted","Data":"977ff384ed4d5e33c4bd8ec00a100343ef500de755ac38a42bc43833b08f95c2"} Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.106977 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" podStartSLOduration=129.106958409 podStartE2EDuration="2m9.106958409s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:54.104532981 +0000 UTC m=+153.431246709" watchObservedRunningTime="2026-02-16 13:05:54.106958409 +0000 UTC m=+153.433672137" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.194515 
4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.194498342 podStartE2EDuration="2.194498342s" podCreationTimestamp="2026-02-16 13:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:54.191684093 +0000 UTC m=+153.518397821" watchObservedRunningTime="2026-02-16 13:05:54.194498342 +0000 UTC m=+153.521212070" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.403131 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lbjwq"] Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.404068 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.408498 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.415722 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbjwq"] Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.487535 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh2gk\" (UniqueName: \"kubernetes.io/projected/7a4d9957-2535-428e-8a45-b092fa854f73-kube-api-access-bh2gk\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.487723 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-utilities\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.487791 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-catalog-content\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.589168 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-utilities\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.589538 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-catalog-content\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.589566 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh2gk\" (UniqueName: \"kubernetes.io/projected/7a4d9957-2535-428e-8a45-b092fa854f73-kube-api-access-bh2gk\") pod 
\"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.589755 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-utilities\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.589858 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-catalog-content\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.608705 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh2gk\" (UniqueName: \"kubernetes.io/projected/7a4d9957-2535-428e-8a45-b092fa854f73-kube-api-access-bh2gk\") pod \"redhat-marketplace-lbjwq\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.724538 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.779700 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:54 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:54 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:54 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.779762 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.805135 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qmhxp"] Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.838413 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.839735 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qmhxp"] Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.896040 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-utilities\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.896092 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m27mr\" (UniqueName: \"kubernetes.io/projected/70be8a51-314d-41e9-af68-4a414af8c62d-kube-api-access-m27mr\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.896171 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-catalog-content\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.999224 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-utilities\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.999693 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m27mr\" (UniqueName: \"kubernetes.io/projected/70be8a51-314d-41e9-af68-4a414af8c62d-kube-api-access-m27mr\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:54 crc kubenswrapper[4816]: I0216 13:05:54.999768 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-catalog-content\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.000137 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-catalog-content\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.000230 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbjwq"] Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.000250 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-utilities\") pod \"redhat-marketplace-qmhxp\" (UID: 
\"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.017466 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m27mr\" (UniqueName: \"kubernetes.io/projected/70be8a51-314d-41e9-af68-4a414af8c62d-kube-api-access-m27mr\") pod \"redhat-marketplace-qmhxp\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:55 crc kubenswrapper[4816]: W0216 13:05:55.018122 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a4d9957_2535_428e_8a45_b092fa854f73.slice/crio-b3929c7ef2dcfa351422cd51e6f7ffe323c26737444e8348fa27d90d8bd5319c WatchSource:0}: Error finding container b3929c7ef2dcfa351422cd51e6f7ffe323c26737444e8348fa27d90d8bd5319c: Status 404 returned error can't find the container with id b3929c7ef2dcfa351422cd51e6f7ffe323c26737444e8348fa27d90d8bd5319c Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.087359 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbjwq" event={"ID":"7a4d9957-2535-428e-8a45-b092fa854f73","Type":"ContainerStarted","Data":"b3929c7ef2dcfa351422cd51e6f7ffe323c26737444e8348fa27d90d8bd5319c"} Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.090980 4816 generic.go:334] "Generic (PLEG): container finished" podID="9b102838-955a-4fb0-8afe-f0a0e9582e67" containerID="ef8631c451bb904eed872546899c9ab83940b07aa1306dea058c738876b32570" exitCode=0 Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.091426 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9b102838-955a-4fb0-8afe-f0a0e9582e67","Type":"ContainerDied","Data":"ef8631c451bb904eed872546899c9ab83940b07aa1306dea058c738876b32570"} Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.166037 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.166998 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.169976 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.171891 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.174469 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.190703 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.204868 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3a3616f9-1722-4754-9d7e-466dd6f7b375-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.204924 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a3616f9-1722-4754-9d7e-466dd6f7b375-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.306009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3a3616f9-1722-4754-9d7e-466dd6f7b375-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.306136 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a3616f9-1722-4754-9d7e-466dd6f7b375-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.306108 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3a3616f9-1722-4754-9d7e-466dd6f7b375-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.324007 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a3616f9-1722-4754-9d7e-466dd6f7b375-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.427752 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wjhmn"] Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.428744 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wjhmn"] Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.428854 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.431445 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.492167 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.508425 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxqn4\" (UniqueName: \"kubernetes.io/projected/e359a80a-04b9-4544-85b7-b51c74bbef61-kube-api-access-xxqn4\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.508489 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-catalog-content\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.508690 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-utilities\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.610518 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-catalog-content\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.610595 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-utilities\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.610635 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxqn4\" (UniqueName: \"kubernetes.io/projected/e359a80a-04b9-4544-85b7-b51c74bbef61-kube-api-access-xxqn4\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.611025 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-catalog-content\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.611166 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-utilities\") pod \"redhat-operators-wjhmn\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.631542 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxqn4\" (UniqueName: \"kubernetes.io/projected/e359a80a-04b9-4544-85b7-b51c74bbef61-kube-api-access-xxqn4\") pod \"redhat-operators-wjhmn\" (UID: 
\"e359a80a-04b9-4544-85b7-b51c74bbef61\") " pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.751141 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.779907 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:55 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:55 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:55 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.779969 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.804742 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5hz29"] Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.805733 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.820999 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz8zp\" (UniqueName: \"kubernetes.io/projected/c7772346-3c28-4beb-9ddf-f6270c0cdabe-kube-api-access-jz8zp\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.823332 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5hz29"] Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.823418 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-utilities\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.823468 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-catalog-content\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.826409 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qmhxp"] Feb 16 13:05:55 crc kubenswrapper[4816]: W0216 13:05:55.838927 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70be8a51_314d_41e9_af68_4a414af8c62d.slice/crio-f5b51cbc026f8756b0ff20e052b3883c018bbba8ebf4cc5fa358d454de3c78f9 WatchSource:0}: Error finding container f5b51cbc026f8756b0ff20e052b3883c018bbba8ebf4cc5fa358d454de3c78f9: Status 404 returned error can't find the container with id 
f5b51cbc026f8756b0ff20e052b3883c018bbba8ebf4cc5fa358d454de3c78f9 Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.924173 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz8zp\" (UniqueName: \"kubernetes.io/projected/c7772346-3c28-4beb-9ddf-f6270c0cdabe-kube-api-access-jz8zp\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.924443 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-utilities\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.924476 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-catalog-content\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.925152 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-catalog-content\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.925166 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-utilities\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.952280 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz8zp\" (UniqueName: \"kubernetes.io/projected/c7772346-3c28-4beb-9ddf-f6270c0cdabe-kube-api-access-jz8zp\") pod \"redhat-operators-5hz29\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") " pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:55 crc kubenswrapper[4816]: I0216 13:05:55.963969 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 16 13:05:55 crc kubenswrapper[4816]: W0216 13:05:55.979543 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod3a3616f9_1722_4754_9d7e_466dd6f7b375.slice/crio-398bdbc1f8db7183510e229fae56faf1abb25d9a1df142ddaa1adb835dec573c WatchSource:0}: Error finding container 398bdbc1f8db7183510e229fae56faf1abb25d9a1df142ddaa1adb835dec573c: Status 404 returned error can't find the container with id 398bdbc1f8db7183510e229fae56faf1abb25d9a1df142ddaa1adb835dec573c Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.004681 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wjhmn"] Feb 16 13:05:56 crc kubenswrapper[4816]: W0216 13:05:56.011522 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice/crio-63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635 
WatchSource:0}: Error finding container 63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635: Status 404 returned error can't find the container with id 63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635 Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.110704 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjhmn" event={"ID":"e359a80a-04b9-4544-85b7-b51c74bbef61","Type":"ContainerStarted","Data":"63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635"} Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.113110 4816 generic.go:334] "Generic (PLEG): container finished" podID="7a4d9957-2535-428e-8a45-b092fa854f73" containerID="412806ad72a9665d2ddf0f50e303a2d951058b01437017dbfb9f6e19814d7ca3" exitCode=0 Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.113173 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbjwq" event={"ID":"7a4d9957-2535-428e-8a45-b092fa854f73","Type":"ContainerDied","Data":"412806ad72a9665d2ddf0f50e303a2d951058b01437017dbfb9f6e19814d7ca3"} Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.119077 4816 generic.go:334] "Generic (PLEG): container finished" podID="70be8a51-314d-41e9-af68-4a414af8c62d" containerID="8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98" exitCode=0 Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.119142 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qmhxp" event={"ID":"70be8a51-314d-41e9-af68-4a414af8c62d","Type":"ContainerDied","Data":"8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98"} Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.119167 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qmhxp" event={"ID":"70be8a51-314d-41e9-af68-4a414af8c62d","Type":"ContainerStarted","Data":"f5b51cbc026f8756b0ff20e052b3883c018bbba8ebf4cc5fa358d454de3c78f9"} Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.126374 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3a3616f9-1722-4754-9d7e-466dd6f7b375","Type":"ContainerStarted","Data":"398bdbc1f8db7183510e229fae56faf1abb25d9a1df142ddaa1adb835dec573c"} Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.155539 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.404938 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.419639 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5hz29"] Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.446335 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b102838-955a-4fb0-8afe-f0a0e9582e67-kube-api-access\") pod \"9b102838-955a-4fb0-8afe-f0a0e9582e67\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.446393 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9b102838-955a-4fb0-8afe-f0a0e9582e67-kubelet-dir\") pod \"9b102838-955a-4fb0-8afe-f0a0e9582e67\" (UID: \"9b102838-955a-4fb0-8afe-f0a0e9582e67\") " Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.446691 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b102838-955a-4fb0-8afe-f0a0e9582e67-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9b102838-955a-4fb0-8afe-f0a0e9582e67" (UID: "9b102838-955a-4fb0-8afe-f0a0e9582e67"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.452212 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b102838-955a-4fb0-8afe-f0a0e9582e67-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9b102838-955a-4fb0-8afe-f0a0e9582e67" (UID: "9b102838-955a-4fb0-8afe-f0a0e9582e67"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.547394 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b102838-955a-4fb0-8afe-f0a0e9582e67-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.547717 4816 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9b102838-955a-4fb0-8afe-f0a0e9582e67-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.779986 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:56 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:56 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:56 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:56 crc kubenswrapper[4816]: I0216 13:05:56.780044 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.137874 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3a3616f9-1722-4754-9d7e-466dd6f7b375","Type":"ContainerStarted","Data":"aaeae2aef2019836c48322003c326cb123ae94d206ffbd2b41cfed0e11f4cb07"} Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.140880 4816 generic.go:334] "Generic (PLEG): container finished" podID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerID="9dde6a7945500dd215a49ab801bc282fdebdb625627c4a5517e092a5e815c844" exitCode=0 Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.141100 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjhmn" event={"ID":"e359a80a-04b9-4544-85b7-b51c74bbef61","Type":"ContainerDied","Data":"9dde6a7945500dd215a49ab801bc282fdebdb625627c4a5517e092a5e815c844"} Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.159927 4816 generic.go:334] "Generic (PLEG): container finished" podID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerID="6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399" exitCode=0 Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.159936 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.159921952 podStartE2EDuration="2.159921952s" podCreationTimestamp="2026-02-16 13:05:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:05:57.14939328 +0000 UTC m=+156.476107008" watchObservedRunningTime="2026-02-16 13:05:57.159921952 +0000 UTC m=+156.486635680" Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.160018 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hz29" event={"ID":"c7772346-3c28-4beb-9ddf-f6270c0cdabe","Type":"ContainerDied","Data":"6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399"} Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.160045 4816 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hz29" event={"ID":"c7772346-3c28-4beb-9ddf-f6270c0cdabe","Type":"ContainerStarted","Data":"2c72abae1f5239ad054153ddfd15db22f7a8f2b553303d7baf1edbed83efba66"} Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.169328 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9b102838-955a-4fb0-8afe-f0a0e9582e67","Type":"ContainerDied","Data":"1ddd7fc431e3c77a30724ed8629dc28051d0d7386550d3e58cb981c752d91c91"} Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.169373 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ddd7fc431e3c77a30724ed8629dc28051d0d7386550d3e58cb981c752d91c91" Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.169432 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.785139 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:57 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:57 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:57 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:57 crc kubenswrapper[4816]: I0216 13:05:57.785421 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:58 crc kubenswrapper[4816]: I0216 13:05:58.207249 4816 generic.go:334] "Generic (PLEG): container finished" podID="3a3616f9-1722-4754-9d7e-466dd6f7b375" containerID="aaeae2aef2019836c48322003c326cb123ae94d206ffbd2b41cfed0e11f4cb07" exitCode=0 Feb 16 13:05:58 crc kubenswrapper[4816]: I0216 13:05:58.207293 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3a3616f9-1722-4754-9d7e-466dd6f7b375","Type":"ContainerDied","Data":"aaeae2aef2019836c48322003c326cb123ae94d206ffbd2b41cfed0e11f4cb07"} Feb 16 13:05:58 crc kubenswrapper[4816]: I0216 13:05:58.540667 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:58 crc kubenswrapper[4816]: I0216 13:05:58.545618 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-l7q8r" Feb 16 13:05:58 crc kubenswrapper[4816]: I0216 13:05:58.627438 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-p9dwz" Feb 16 13:05:58 crc kubenswrapper[4816]: I0216 13:05:58.778332 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:58 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:58 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:58 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:58 crc kubenswrapper[4816]: I0216 13:05:58.778411 4816 prober.go:107] 
"Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:05:59 crc kubenswrapper[4816]: I0216 13:05:59.780593 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:05:59 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:05:59 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:05:59 crc kubenswrapper[4816]: healthz check failed Feb 16 13:05:59 crc kubenswrapper[4816]: I0216 13:05:59.782128 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:06:00 crc kubenswrapper[4816]: I0216 13:06:00.777313 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:06:00 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:06:00 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:06:00 crc kubenswrapper[4816]: healthz check failed Feb 16 13:06:00 crc kubenswrapper[4816]: I0216 13:06:00.777367 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:06:01 crc kubenswrapper[4816]: I0216 13:06:01.778409 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:06:01 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:06:01 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:06:01 crc kubenswrapper[4816]: healthz check failed Feb 16 13:06:01 crc kubenswrapper[4816]: I0216 13:06:01.778551 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 16 13:06:02 crc kubenswrapper[4816]: I0216 13:06:02.778792 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 16 13:06:02 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld Feb 16 13:06:02 crc kubenswrapper[4816]: [+]process-running ok Feb 16 13:06:02 crc kubenswrapper[4816]: healthz check failed Feb 16 13:06:02 crc kubenswrapper[4816]: I0216 13:06:02.779089 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 
Feb 16 13:06:03 crc kubenswrapper[4816]: I0216 13:06:03.251653 4816 patch_prober.go:28] interesting pod/console-f9d7485db-f2dr7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Feb 16 13:06:03 crc kubenswrapper[4816]: I0216 13:06:03.251729 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-f2dr7" podUID="89f428fd-8717-4819-81d8-ee04443b38a5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused"
Feb 16 13:06:03 crc kubenswrapper[4816]: I0216 13:06:03.777869 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 16 13:06:03 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld
Feb 16 13:06:03 crc kubenswrapper[4816]: [+]process-running ok
Feb 16 13:06:03 crc kubenswrapper[4816]: healthz check failed
Feb 16 13:06:03 crc kubenswrapper[4816]: I0216 13:06:03.777921 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 16 13:06:04 crc kubenswrapper[4816]: I0216 13:06:04.777783 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 16 13:06:04 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld
Feb 16 13:06:04 crc kubenswrapper[4816]: [+]process-running ok
Feb 16 13:06:04 crc kubenswrapper[4816]: healthz check failed
Feb 16 13:06:04 crc kubenswrapper[4816]: I0216 13:06:04.777843 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.135483 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.208803 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3a3616f9-1722-4754-9d7e-466dd6f7b375-kubelet-dir\") pod \"3a3616f9-1722-4754-9d7e-466dd6f7b375\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") "
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.208870 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a3616f9-1722-4754-9d7e-466dd6f7b375-kube-api-access\") pod \"3a3616f9-1722-4754-9d7e-466dd6f7b375\" (UID: \"3a3616f9-1722-4754-9d7e-466dd6f7b375\") "
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.209103 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3a3616f9-1722-4754-9d7e-466dd6f7b375-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3a3616f9-1722-4754-9d7e-466dd6f7b375" (UID: "3a3616f9-1722-4754-9d7e-466dd6f7b375"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.215418 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a3616f9-1722-4754-9d7e-466dd6f7b375-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3a3616f9-1722-4754-9d7e-466dd6f7b375" (UID: "3a3616f9-1722-4754-9d7e-466dd6f7b375"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.280864 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"3a3616f9-1722-4754-9d7e-466dd6f7b375","Type":"ContainerDied","Data":"398bdbc1f8db7183510e229fae56faf1abb25d9a1df142ddaa1adb835dec573c"}
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.280930 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="398bdbc1f8db7183510e229fae56faf1abb25d9a1df142ddaa1adb835dec573c"
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.281043 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.310903 4816 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3a3616f9-1722-4754-9d7e-466dd6f7b375-kubelet-dir\") on node \"crc\" DevicePath \"\""
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.310969 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a3616f9-1722-4754-9d7e-466dd6f7b375-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.778329 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 16 13:06:05 crc kubenswrapper[4816]: [-]has-synced failed: reason withheld
Feb 16 13:06:05 crc kubenswrapper[4816]: [+]process-running ok
Feb 16 13:06:05 crc kubenswrapper[4816]: healthz check failed
Feb 16 13:06:05 crc kubenswrapper[4816]: I0216 13:06:05.778427 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 16 13:06:06 crc kubenswrapper[4816]: I0216 13:06:06.777711 4816 patch_prober.go:28] interesting pod/router-default-5444994796-lwmcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 16 13:06:06 crc kubenswrapper[4816]: [+]has-synced ok
Feb 16 13:06:06 crc kubenswrapper[4816]: [+]process-running ok
Feb 16 13:06:06 crc kubenswrapper[4816]: healthz check failed
Feb 16 13:06:06 crc kubenswrapper[4816]: I0216 13:06:06.777775 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-lwmcd" podUID="3612ec61-6d09-4cf6-abc7-aa0258e232ea" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 16 13:06:06 crc kubenswrapper[4816]: I0216 13:06:06.940642 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 13:06:06 crc kubenswrapper[4816]: I0216 13:06:06.941513 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 13:06:07 crc kubenswrapper[4816]: I0216 13:06:07.779015 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-lwmcd"
Feb 16 13:06:07 crc kubenswrapper[4816]: I0216 13:06:07.781858 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-lwmcd"
Feb 16 13:06:08 crc kubenswrapper[4816]: I0216 13:06:08.265456 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts"
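
[annotation — not part of the captured log] The bracketed continuation lines above are the healthz response body, one sub-check per line; the probe stays red until every sub-check passes. Note has-synced flips from [-] to [+] at 13:06:06, and the startup probe finally reports "started" at 13:06:07. A small sketch that folds such a body into pass/fail results; parseHealthz is my own helper for reading these captures, not kubelet or router code:

package main

import (
	"fmt"
	"strings"
)

// parseHealthz reads "[+]check ok" / "[-]check failed: ..." body lines and
// reports which sub-checks currently pass.
func parseHealthz(body string) map[string]bool {
	checks := make(map[string]bool)
	for _, line := range strings.Split(body, "\n") {
		line = strings.TrimSpace(line)
		if strings.HasPrefix(line, "[+]") || strings.HasPrefix(line, "[-]") {
			name := strings.Fields(line[3:])[0] // "has-synced" from "has-synced failed: ..."
			checks[name] = strings.HasPrefix(line, "[+]")
		}
	}
	return checks
}

func main() {
	body := "[-]backend-http failed: reason withheld\n" +
		"[+]has-synced ok\n" +
		"[+]process-running ok\n" +
		"healthz check failed"
	fmt.Println(parseHealthz(body)) // map[backend-http:false has-synced:true process-running:true]
}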
(UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:06:08 crc kubenswrapper[4816]: I0216 13:06:08.278351 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/108200fc-f37f-4d80-bd46-314679989e11-metrics-certs\") pod \"network-metrics-daemon-gfwts\" (UID: \"108200fc-f37f-4d80-bd46-314679989e11\") " pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:06:08 crc kubenswrapper[4816]: I0216 13:06:08.324887 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gfwts" Feb 16 13:06:09 crc kubenswrapper[4816]: I0216 13:06:09.285645 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jjn42"] Feb 16 13:06:09 crc kubenswrapper[4816]: I0216 13:06:09.285919 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager" containerID="cri-o://763560e421874c542fa45a6d4288f1a203475533514a7016238a720216705c9f" gracePeriod=30 Feb 16 13:06:09 crc kubenswrapper[4816]: I0216 13:06:09.300929 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s"] Feb 16 13:06:09 crc kubenswrapper[4816]: I0216 13:06:09.301129 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager" containerID="cri-o://adf048060150afd0625ec77a6f055cb29f89fe8b27b89181b427346b8a9ae9ae" gracePeriod=30 Feb 16 13:06:10 crc kubenswrapper[4816]: I0216 13:06:10.310296 4816 generic.go:334] "Generic (PLEG): container finished" podID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerID="763560e421874c542fa45a6d4288f1a203475533514a7016238a720216705c9f" exitCode=0 Feb 16 13:06:10 crc kubenswrapper[4816]: I0216 13:06:10.310385 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" event={"ID":"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d","Type":"ContainerDied","Data":"763560e421874c542fa45a6d4288f1a203475533514a7016238a720216705c9f"} Feb 16 13:06:10 crc kubenswrapper[4816]: I0216 13:06:10.312782 4816 generic.go:334] "Generic (PLEG): container finished" podID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerID="adf048060150afd0625ec77a6f055cb29f89fe8b27b89181b427346b8a9ae9ae" exitCode=0 Feb 16 13:06:10 crc kubenswrapper[4816]: I0216 13:06:10.312831 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" event={"ID":"1cae9b2f-0c46-4058-8b87-8d8cf933246c","Type":"ContainerDied","Data":"adf048060150afd0625ec77a6f055cb29f89fe8b27b89181b427346b8a9ae9ae"} Feb 16 13:06:13 crc kubenswrapper[4816]: I0216 13:06:13.390094 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:06:13 crc kubenswrapper[4816]: I0216 13:06:13.554067 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:06:13 crc kubenswrapper[4816]: I0216 13:06:13.558975 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-f2dr7" Feb 16 13:06:14 crc kubenswrapper[4816]: I0216 13:06:14.381542 4816 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9pl8s container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 16 13:06:14 crc kubenswrapper[4816]: I0216 13:06:14.381630 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 16 13:06:14 crc kubenswrapper[4816]: I0216 13:06:14.381643 4816 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-jjn42 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 16 13:06:14 crc kubenswrapper[4816]: I0216 13:06:14.381762 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 16 13:06:16 crc kubenswrapper[4816]: E0216 13:06:16.199746 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 16 13:06:16 crc kubenswrapper[4816]: E0216 13:06:16.200530 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bh2gk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-lbjwq_openshift-marketplace(7a4d9957-2535-428e-8a45-b092fa854f73): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 16 13:06:16 crc kubenswrapper[4816]: E0216 13:06:16.224872 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-lbjwq" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" Feb 16 13:06:21 crc kubenswrapper[4816]: E0216 13:06:21.121342 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-lbjwq" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" Feb 16 13:06:23 crc kubenswrapper[4816]: I0216 13:06:23.840740 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-86gqx" Feb 16 13:06:23 crc kubenswrapper[4816]: I0216 13:06:23.986282 4816 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9pl8s container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 16 13:06:23 crc kubenswrapper[4816]: I0216 13:06:23.986696 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 16 13:06:24 crc kubenswrapper[4816]: I0216 13:06:24.048642 4816 
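
[annotation — not part of the captured log] The pull failures above follow the usual progression: the runtime-level PullImage call fails (ErrImagePull), the pod worker records the sync error, and subsequent retries are throttled as ImagePullBackOff. Kubelet's retry delay is commonly described as roughly exponential, starting around 10s and capping near 5m; treat those exact numbers as an assumption here, since this log does not show them. A schematic sketch of that shape of backoff:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed backoff parameters, for illustration only.
	delay, maxDelay := 10*time.Second, 5*time.Minute
	for attempt := 1; attempt <= 7; attempt++ {
		fmt.Printf("pull attempt %d failed; next retry in %s (ImagePullBackOff)\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}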
Feb 16 13:06:27 crc kubenswrapper[4816]: E0216 13:06:27.578839 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Feb 16 13:06:27 crc kubenswrapper[4816]: E0216 13:06:27.579014 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6xrvr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-6f4r6_openshift-marketplace(7550d81a-724b-4009-80d0-fa650ea35fa8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 16 13:06:27 crc kubenswrapper[4816]: E0216 13:06:27.580236 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-6f4r6" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8"
Feb 16 13:06:29 crc kubenswrapper[4816]: I0216 13:06:29.462321 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 16 13:06:32 crc kubenswrapper[4816]: E0216 13:06:32.040939 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-6f4r6" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8"
Feb 16 13:06:32 crc kubenswrapper[4816]: E0216 13:06:32.180416 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Feb 16 13:06:32 crc kubenswrapper[4816]: E0216 13:06:32.180594 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4b9q8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-n6nmx_openshift-marketplace(504db8d2-a7d7-4484-9426-6426503b9df3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 16 13:06:32 crc kubenswrapper[4816]: E0216 13:06:32.182637 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-n6nmx" podUID="504db8d2-a7d7-4484-9426-6426503b9df3"
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.753126 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-n6nmx" podUID="504db8d2-a7d7-4484-9426-6426503b9df3"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.863639 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.869747 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.909521 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"]
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.910010 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b102838-955a-4fb0-8afe-f0a0e9582e67" containerName="pruner"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910045 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b102838-955a-4fb0-8afe-f0a0e9582e67" containerName="pruner"
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.910075 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a3616f9-1722-4754-9d7e-466dd6f7b375" containerName="pruner"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910086 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a3616f9-1722-4754-9d7e-466dd6f7b375" containerName="pruner"
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.910109 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910121 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager"
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.910137 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910149 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910290 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b102838-955a-4fb0-8afe-f0a0e9582e67" containerName="pruner"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910312 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a3616f9-1722-4754-9d7e-466dd6f7b375" containerName="pruner"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910327 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910342 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.910888 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.916713 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"]
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.953636 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.953805 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-htr6p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-89gd5_openshift-marketplace(19812a43-b6b0-45dc-9c35-b3c787fd4ff1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 16 13:06:33 crc kubenswrapper[4816]: E0216 13:06:33.955176 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-89gd5" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.985733 4816 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9pl8s container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.985788 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997283 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-config\") pod \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997319 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxvr5\" (UniqueName: \"kubernetes.io/projected/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-kube-api-access-qxvr5\") pod \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997337 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-client-ca\") pod \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997399 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5fss\" (UniqueName: \"kubernetes.io/projected/1cae9b2f-0c46-4058-8b87-8d8cf933246c-kube-api-access-k5fss\") pod \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997422 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cae9b2f-0c46-4058-8b87-8d8cf933246c-serving-cert\") pod \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\" (UID: \"1cae9b2f-0c46-4058-8b87-8d8cf933246c\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997439 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-proxy-ca-bundles\") pod \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997477 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-client-ca\") pod \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997504 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-serving-cert\") pod \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997528 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-config\") pod \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\" (UID: \"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d\") "
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997646 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-client-ca\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997685 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-proxy-ca-bundles\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997722 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-config\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997741 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5wcv\" (UniqueName: \"kubernetes.io/projected/65ccdb11-3222-41c2-96ae-6bb14c648950-kube-api-access-j5wcv\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.997766 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ccdb11-3222-41c2-96ae-6bb14c648950-serving-cert\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.998982 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-config" (OuterVolumeSpecName: "config") pod "1cae9b2f-0c46-4058-8b87-8d8cf933246c" (UID: "1cae9b2f-0c46-4058-8b87-8d8cf933246c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:06:33 crc kubenswrapper[4816]: I0216 13:06:33.999443 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-client-ca" (OuterVolumeSpecName: "client-ca") pod "1cae9b2f-0c46-4058-8b87-8d8cf933246c" (UID: "1cae9b2f-0c46-4058-8b87-8d8cf933246c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.000033 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-config" (OuterVolumeSpecName: "config") pod "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" (UID: "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.000525 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" (UID: "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.000601 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-client-ca" (OuterVolumeSpecName: "client-ca") pod "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" (UID: "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.003968 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-kube-api-access-qxvr5" (OuterVolumeSpecName: "kube-api-access-qxvr5") pod "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" (UID: "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d"). InnerVolumeSpecName "kube-api-access-qxvr5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.004671 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" (UID: "0153ac39-6d42-4f8d-9279-1c38e9f8fc6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.004937 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cae9b2f-0c46-4058-8b87-8d8cf933246c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1cae9b2f-0c46-4058-8b87-8d8cf933246c" (UID: "1cae9b2f-0c46-4058-8b87-8d8cf933246c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.010488 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cae9b2f-0c46-4058-8b87-8d8cf933246c-kube-api-access-k5fss" (OuterVolumeSpecName: "kube-api-access-k5fss") pod "1cae9b2f-0c46-4058-8b87-8d8cf933246c" (UID: "1cae9b2f-0c46-4058-8b87-8d8cf933246c"). InnerVolumeSpecName "kube-api-access-k5fss". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:06:34 crc kubenswrapper[4816]: E0216 13:06:34.017083 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 16 13:06:34 crc kubenswrapper[4816]: E0216 13:06:34.017310 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cdxk6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-nsvtk_openshift-marketplace(793c1b16-a031-4e64-8874-03cf983d16b5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 16 13:06:34 crc kubenswrapper[4816]: E0216 13:06:34.018880 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-nsvtk" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.048543 4816 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-jjn42 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.048605 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 16 
13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098576 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-config\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098633 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5wcv\" (UniqueName: \"kubernetes.io/projected/65ccdb11-3222-41c2-96ae-6bb14c648950-kube-api-access-j5wcv\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098685 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ccdb11-3222-41c2-96ae-6bb14c648950-serving-cert\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098732 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-client-ca\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098757 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-proxy-ca-bundles\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098794 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098805 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098815 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxvr5\" (UniqueName: \"kubernetes.io/projected/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-kube-api-access-qxvr5\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098824 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cae9b2f-0c46-4058-8b87-8d8cf933246c-client-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098832 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5fss\" (UniqueName: \"kubernetes.io/projected/1cae9b2f-0c46-4058-8b87-8d8cf933246c-kube-api-access-k5fss\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098840 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/1cae9b2f-0c46-4058-8b87-8d8cf933246c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098850 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098858 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-client-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.098867 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.099828 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-proxy-ca-bundles\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.100404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-client-ca\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.100647 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-config\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.102900 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ccdb11-3222-41c2-96ae-6bb14c648950-serving-cert\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.119132 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5wcv\" (UniqueName: \"kubernetes.io/projected/65ccdb11-3222-41c2-96ae-6bb14c648950-kube-api-access-j5wcv\") pod \"controller-manager-764d98ff7d-qrwrn\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") " pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.148609 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.149425 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.154965 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.155330 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.155738 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.200189 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4f6f4a14-7612-4aef-b42e-a43546f2421e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.200261 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4f6f4a14-7612-4aef-b42e-a43546f2421e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.230675 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.302157 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4f6f4a14-7612-4aef-b42e-a43546f2421e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.302250 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4f6f4a14-7612-4aef-b42e-a43546f2421e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.302322 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4f6f4a14-7612-4aef-b42e-a43546f2421e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.317180 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4f6f4a14-7612-4aef-b42e-a43546f2421e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.471745 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.514464 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" event={"ID":"1cae9b2f-0c46-4058-8b87-8d8cf933246c","Type":"ContainerDied","Data":"a94163f3400e4635beef5be0a92f0ef34b6f1f7ecf9f4e39da312c2f9c462de5"} Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.514514 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.514528 4816 scope.go:117] "RemoveContainer" containerID="adf048060150afd0625ec77a6f055cb29f89fe8b27b89181b427346b8a9ae9ae" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.517496 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" event={"ID":"0153ac39-6d42-4f8d-9279-1c38e9f8fc6d","Type":"ContainerDied","Data":"d8873de42a5f6fc0a08613aba38c6c1b2e17188fff43ebcb318747a534166fc6"} Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.517577 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jjn42" Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.574870 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s"] Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.579204 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9pl8s"] Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.581931 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jjn42"] Feb 16 13:06:34 crc kubenswrapper[4816]: I0216 13:06:34.585388 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jjn42"] Feb 16 13:06:35 crc kubenswrapper[4816]: I0216 13:06:35.407316 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0153ac39-6d42-4f8d-9279-1c38e9f8fc6d" path="/var/lib/kubelet/pods/0153ac39-6d42-4f8d-9279-1c38e9f8fc6d/volumes" Feb 16 13:06:35 crc kubenswrapper[4816]: I0216 13:06:35.408103 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cae9b2f-0c46-4058-8b87-8d8cf933246c" path="/var/lib/kubelet/pods/1cae9b2f-0c46-4058-8b87-8d8cf933246c/volumes" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.242141 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"] Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.243552 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.246351 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.246512 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.248616 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.248935 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.250029 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.250232 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.251985 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"] Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.324977 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-client-ca\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.325323 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-serving-cert\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.325359 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5l9w\" (UniqueName: \"kubernetes.io/projected/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-kube-api-access-p5l9w\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.325386 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-config\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.427051 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-serving-cert\") pod 
\"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.427109 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5l9w\" (UniqueName: \"kubernetes.io/projected/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-kube-api-access-p5l9w\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.427144 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-config\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.427207 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-client-ca\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.428810 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-client-ca\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.429065 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-config\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.434623 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-serving-cert\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.443683 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5l9w\" (UniqueName: \"kubernetes.io/projected/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-kube-api-access-p5l9w\") pod \"route-controller-manager-b456548cd-fqtgw\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") " pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.597704 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.940785 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:06:36 crc kubenswrapper[4816]: I0216 13:06:36.940852 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.377100 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-nsvtk" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.377928 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-89gd5" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" Feb 16 13:06:37 crc kubenswrapper[4816]: I0216 13:06:37.451349 4816 scope.go:117] "RemoveContainer" containerID="763560e421874c542fa45a6d4288f1a203475533514a7016238a720216705c9f" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.632466 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.632826 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xxqn4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-wjhmn_openshift-marketplace(e359a80a-04b9-4544-85b7-b51c74bbef61): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.634000 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-wjhmn" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.717164 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.717291 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jz8zp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-5hz29_openshift-marketplace(c7772346-3c28-4beb-9ddf-f6270c0cdabe): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 16 13:06:37 crc kubenswrapper[4816]: E0216 13:06:37.718587 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-5hz29" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" Feb 16 13:06:37 crc kubenswrapper[4816]: I0216 13:06:37.899384 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gfwts"] Feb 16 13:06:37 crc kubenswrapper[4816]: I0216 13:06:37.907913 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"] Feb 16 13:06:37 crc kubenswrapper[4816]: W0216 13:06:37.913450 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod108200fc_f37f_4d80_bd46_314679989e11.slice/crio-a812a565ea9b4d8a4bebc93cde554d6155702fc22f1d8922045cfdba87a09bd1 WatchSource:0}: Error finding container a812a565ea9b4d8a4bebc93cde554d6155702fc22f1d8922045cfdba87a09bd1: Status 404 returned error can't find the container with id a812a565ea9b4d8a4bebc93cde554d6155702fc22f1d8922045cfdba87a09bd1 Feb 16 13:06:37 crc kubenswrapper[4816]: W0216 13:06:37.921461 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae2f26b7_0406_4928_9c24_fc9d3a2c6b10.slice/crio-139aa568b314232d50ded7572e43bb01393cb52961b9ec5e800a582e169f3589 WatchSource:0}: Error finding container 139aa568b314232d50ded7572e43bb01393cb52961b9ec5e800a582e169f3589: Status 404 returned error can't find the container with id 139aa568b314232d50ded7572e43bb01393cb52961b9ec5e800a582e169f3589 Feb 16 13:06:37 crc kubenswrapper[4816]: I0216 13:06:37.976130 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 16 13:06:37 crc kubenswrapper[4816]: I0216 13:06:37.987029 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"] Feb 16 13:06:37 crc kubenswrapper[4816]: W0216 13:06:37.999055 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65ccdb11_3222_41c2_96ae_6bb14c648950.slice/crio-09843936222df934de061ff5a374a6aa090eb20d52ada54fd3d186b273d7aca0 WatchSource:0}: Error finding container 09843936222df934de061ff5a374a6aa090eb20d52ada54fd3d186b273d7aca0: Status 404 returned error can't find the container with id 09843936222df934de061ff5a374a6aa090eb20d52ada54fd3d186b273d7aca0 Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.565366 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"4f6f4a14-7612-4aef-b42e-a43546f2421e","Type":"ContainerStarted","Data":"fdb2242a8ec1cc3d3c7b6f700afdad1e5e9178ec2966f228a6dacfe777de159e"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.565938 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"4f6f4a14-7612-4aef-b42e-a43546f2421e","Type":"ContainerStarted","Data":"3dd9a9068a7f9b2a64f1ee9dbae258b38155c8b78ef4f302dbfbcd7e6c8a3146"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.569635 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" event={"ID":"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10","Type":"ContainerStarted","Data":"eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.569696 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" event={"ID":"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10","Type":"ContainerStarted","Data":"139aa568b314232d50ded7572e43bb01393cb52961b9ec5e800a582e169f3589"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.570477 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.572285 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gfwts" event={"ID":"108200fc-f37f-4d80-bd46-314679989e11","Type":"ContainerStarted","Data":"7fb3b482675959ff518e1f7d26de171d74303a4ce3fbe91edd39ea585b9ef3eb"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.572310 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gfwts" event={"ID":"108200fc-f37f-4d80-bd46-314679989e11","Type":"ContainerStarted","Data":"ec4ecff606f907da353a5e6e47db0b386965a064afc16d7b5c587f430028085f"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.572322 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gfwts" event={"ID":"108200fc-f37f-4d80-bd46-314679989e11","Type":"ContainerStarted","Data":"a812a565ea9b4d8a4bebc93cde554d6155702fc22f1d8922045cfdba87a09bd1"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.575291 4816 generic.go:334] "Generic (PLEG): container finished" podID="7a4d9957-2535-428e-8a45-b092fa854f73" 
containerID="2f066f1b22502967ceb742e372de5938b6ebead430ed545c32fa195a450a35ef" exitCode=0 Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.575336 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbjwq" event={"ID":"7a4d9957-2535-428e-8a45-b092fa854f73","Type":"ContainerDied","Data":"2f066f1b22502967ceb742e372de5938b6ebead430ed545c32fa195a450a35ef"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.576036 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.582523 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=4.582503743 podStartE2EDuration="4.582503743s" podCreationTimestamp="2026-02-16 13:06:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:06:38.57628758 +0000 UTC m=+197.903001308" watchObservedRunningTime="2026-02-16 13:06:38.582503743 +0000 UTC m=+197.909217471" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.582759 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" event={"ID":"65ccdb11-3222-41c2-96ae-6bb14c648950","Type":"ContainerStarted","Data":"5e9caaf586a372f46499d4185d495d05700d73681aecdf8f081b7a035d358ecd"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.582791 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" event={"ID":"65ccdb11-3222-41c2-96ae-6bb14c648950","Type":"ContainerStarted","Data":"09843936222df934de061ff5a374a6aa090eb20d52ada54fd3d186b273d7aca0"} Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.583063 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.587484 4816 generic.go:334] "Generic (PLEG): container finished" podID="70be8a51-314d-41e9-af68-4a414af8c62d" containerID="8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c" exitCode=0 Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.588609 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qmhxp" event={"ID":"70be8a51-314d-41e9-af68-4a414af8c62d","Type":"ContainerDied","Data":"8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c"} Feb 16 13:06:38 crc kubenswrapper[4816]: E0216 13:06:38.590391 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-5hz29" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" Feb 16 13:06:38 crc kubenswrapper[4816]: E0216 13:06:38.590450 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-wjhmn" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.591543 4816 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.599104 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" podStartSLOduration=9.599067573 podStartE2EDuration="9.599067573s" podCreationTimestamp="2026-02-16 13:06:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:06:38.59859439 +0000 UTC m=+197.925308118" watchObservedRunningTime="2026-02-16 13:06:38.599067573 +0000 UTC m=+197.925781301" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.639000 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-gfwts" podStartSLOduration=173.638984273 podStartE2EDuration="2m53.638984273s" podCreationTimestamp="2026-02-16 13:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:06:38.638855839 +0000 UTC m=+197.965569567" watchObservedRunningTime="2026-02-16 13:06:38.638984273 +0000 UTC m=+197.965698001" Feb 16 13:06:38 crc kubenswrapper[4816]: I0216 13:06:38.663078 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" podStartSLOduration=9.663060942 podStartE2EDuration="9.663060942s" podCreationTimestamp="2026-02-16 13:06:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:06:38.66121451 +0000 UTC m=+197.987928238" watchObservedRunningTime="2026-02-16 13:06:38.663060942 +0000 UTC m=+197.989774670" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.594689 4816 generic.go:334] "Generic (PLEG): container finished" podID="4f6f4a14-7612-4aef-b42e-a43546f2421e" containerID="fdb2242a8ec1cc3d3c7b6f700afdad1e5e9178ec2966f228a6dacfe777de159e" exitCode=0 Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.594788 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"4f6f4a14-7612-4aef-b42e-a43546f2421e","Type":"ContainerDied","Data":"fdb2242a8ec1cc3d3c7b6f700afdad1e5e9178ec2966f228a6dacfe777de159e"} Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.598598 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbjwq" event={"ID":"7a4d9957-2535-428e-8a45-b092fa854f73","Type":"ContainerStarted","Data":"987d4079172833ec6055546a0286b29664d780e99079d9f067ead9eb90494486"} Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.600843 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qmhxp" event={"ID":"70be8a51-314d-41e9-af68-4a414af8c62d","Type":"ContainerStarted","Data":"6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423"} Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.640027 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qmhxp" podStartSLOduration=2.742718735 podStartE2EDuration="45.640000814s" podCreationTimestamp="2026-02-16 13:05:54 +0000 UTC" firstStartedPulling="2026-02-16 13:05:56.12400792 +0000 UTC m=+155.450721648" lastFinishedPulling="2026-02-16 13:06:39.021290009 +0000 UTC 
m=+198.348003727" observedRunningTime="2026-02-16 13:06:39.630045048 +0000 UTC m=+198.956758776" watchObservedRunningTime="2026-02-16 13:06:39.640000814 +0000 UTC m=+198.966714552" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.661500 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lbjwq" podStartSLOduration=2.794487665 podStartE2EDuration="45.661481672s" podCreationTimestamp="2026-02-16 13:05:54 +0000 UTC" firstStartedPulling="2026-02-16 13:05:56.115080252 +0000 UTC m=+155.441793980" lastFinishedPulling="2026-02-16 13:06:38.982074259 +0000 UTC m=+198.308787987" observedRunningTime="2026-02-16 13:06:39.657763439 +0000 UTC m=+198.984477187" watchObservedRunningTime="2026-02-16 13:06:39.661481672 +0000 UTC m=+198.988195400" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.737362 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.738057 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.754005 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.773943 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.774051 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-var-lock\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.774077 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ccee36b0-4da9-48a3-af2d-063ac451f44d-kube-api-access\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.875027 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-var-lock\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.875078 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ccee36b0-4da9-48a3-af2d-063ac451f44d-kube-api-access\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.875117 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.875183 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-var-lock\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.875201 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:39 crc kubenswrapper[4816]: I0216 13:06:39.896050 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ccee36b0-4da9-48a3-af2d-063ac451f44d-kube-api-access\") pod \"installer-9-crc\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.054675 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.510204 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 16 13:06:40 crc kubenswrapper[4816]: W0216 13:06:40.514749 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podccee36b0_4da9_48a3_af2d_063ac451f44d.slice/crio-8f99f4e5a17d7e8b89fae638218bd0f10d3203b8c6968977467d7ecfa10ff45d WatchSource:0}: Error finding container 8f99f4e5a17d7e8b89fae638218bd0f10d3203b8c6968977467d7ecfa10ff45d: Status 404 returned error can't find the container with id 8f99f4e5a17d7e8b89fae638218bd0f10d3203b8c6968977467d7ecfa10ff45d Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.606993 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ccee36b0-4da9-48a3-af2d-063ac451f44d","Type":"ContainerStarted","Data":"8f99f4e5a17d7e8b89fae638218bd0f10d3203b8c6968977467d7ecfa10ff45d"} Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.873169 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.891636 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4f6f4a14-7612-4aef-b42e-a43546f2421e-kube-api-access\") pod \"4f6f4a14-7612-4aef-b42e-a43546f2421e\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.891700 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4f6f4a14-7612-4aef-b42e-a43546f2421e-kubelet-dir\") pod \"4f6f4a14-7612-4aef-b42e-a43546f2421e\" (UID: \"4f6f4a14-7612-4aef-b42e-a43546f2421e\") " Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.891804 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f6f4a14-7612-4aef-b42e-a43546f2421e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4f6f4a14-7612-4aef-b42e-a43546f2421e" (UID: "4f6f4a14-7612-4aef-b42e-a43546f2421e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.891989 4816 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4f6f4a14-7612-4aef-b42e-a43546f2421e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.897788 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f6f4a14-7612-4aef-b42e-a43546f2421e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4f6f4a14-7612-4aef-b42e-a43546f2421e" (UID: "4f6f4a14-7612-4aef-b42e-a43546f2421e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:06:40 crc kubenswrapper[4816]: I0216 13:06:40.992892 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4f6f4a14-7612-4aef-b42e-a43546f2421e-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:41 crc kubenswrapper[4816]: I0216 13:06:41.613663 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ccee36b0-4da9-48a3-af2d-063ac451f44d","Type":"ContainerStarted","Data":"57d8ddba44f1f7f562be1e8e79a175c846abf27ae5a900f67b0605746b3b0665"} Feb 16 13:06:41 crc kubenswrapper[4816]: I0216 13:06:41.615038 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"4f6f4a14-7612-4aef-b42e-a43546f2421e","Type":"ContainerDied","Data":"3dd9a9068a7f9b2a64f1ee9dbae258b38155c8b78ef4f302dbfbcd7e6c8a3146"} Feb 16 13:06:41 crc kubenswrapper[4816]: I0216 13:06:41.615060 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3dd9a9068a7f9b2a64f1ee9dbae258b38155c8b78ef4f302dbfbcd7e6c8a3146" Feb 16 13:06:41 crc kubenswrapper[4816]: I0216 13:06:41.615112 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 16 13:06:41 crc kubenswrapper[4816]: I0216 13:06:41.633468 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.63344824 podStartE2EDuration="2.63344824s" podCreationTimestamp="2026-02-16 13:06:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:06:41.62877726 +0000 UTC m=+200.955490998" watchObservedRunningTime="2026-02-16 13:06:41.63344824 +0000 UTC m=+200.960161968" Feb 16 13:06:41 crc kubenswrapper[4816]: I0216 13:06:41.760330 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-brmt2"] Feb 16 13:06:44 crc kubenswrapper[4816]: I0216 13:06:44.633339 4816 generic.go:334] "Generic (PLEG): container finished" podID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerID="fb97fb1078f028784f6674ae914a089690f5ac6e42d695ef83aecb52fe6ce5b1" exitCode=0 Feb 16 13:06:44 crc kubenswrapper[4816]: I0216 13:06:44.633458 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f4r6" event={"ID":"7550d81a-724b-4009-80d0-fa650ea35fa8","Type":"ContainerDied","Data":"fb97fb1078f028784f6674ae914a089690f5ac6e42d695ef83aecb52fe6ce5b1"} Feb 16 13:06:44 crc kubenswrapper[4816]: I0216 13:06:44.726144 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:06:44 crc kubenswrapper[4816]: I0216 13:06:44.726224 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:06:44 crc kubenswrapper[4816]: I0216 13:06:44.858528 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:06:45 crc kubenswrapper[4816]: I0216 13:06:45.191852 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:06:45 crc kubenswrapper[4816]: I0216 13:06:45.192099 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:06:45 crc kubenswrapper[4816]: I0216 13:06:45.239958 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:06:45 crc kubenswrapper[4816]: I0216 13:06:45.655749 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f4r6" event={"ID":"7550d81a-724b-4009-80d0-fa650ea35fa8","Type":"ContainerStarted","Data":"c0db03af5c6961bfa74dc979dce937737791d6d77d1c1ae1adf382048f1cda38"} Feb 16 13:06:45 crc kubenswrapper[4816]: I0216 13:06:45.673849 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6f4r6" podStartSLOduration=2.719871223 podStartE2EDuration="53.673830478s" podCreationTimestamp="2026-02-16 13:05:52 +0000 UTC" firstStartedPulling="2026-02-16 13:05:54.066791072 +0000 UTC m=+153.393504800" lastFinishedPulling="2026-02-16 13:06:45.020750327 +0000 UTC m=+204.347464055" observedRunningTime="2026-02-16 13:06:45.671460022 +0000 UTC m=+204.998173760" watchObservedRunningTime="2026-02-16 13:06:45.673830478 +0000 UTC m=+205.000544206" Feb 16 13:06:45 crc kubenswrapper[4816]: I0216 
13:06:45.693849 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:06:45 crc kubenswrapper[4816]: I0216 13:06:45.707228 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:06:47 crc kubenswrapper[4816]: I0216 13:06:47.667425 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n6nmx" event={"ID":"504db8d2-a7d7-4484-9426-6426503b9df3","Type":"ContainerStarted","Data":"4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca"} Feb 16 13:06:47 crc kubenswrapper[4816]: I0216 13:06:47.831557 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qmhxp"] Feb 16 13:06:48 crc kubenswrapper[4816]: I0216 13:06:48.673807 4816 generic.go:334] "Generic (PLEG): container finished" podID="504db8d2-a7d7-4484-9426-6426503b9df3" containerID="4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca" exitCode=0 Feb 16 13:06:48 crc kubenswrapper[4816]: I0216 13:06:48.673902 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n6nmx" event={"ID":"504db8d2-a7d7-4484-9426-6426503b9df3","Type":"ContainerDied","Data":"4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca"} Feb 16 13:06:48 crc kubenswrapper[4816]: I0216 13:06:48.674161 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qmhxp" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="registry-server" containerID="cri-o://6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423" gracePeriod=2 Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.355782 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.398717 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m27mr\" (UniqueName: \"kubernetes.io/projected/70be8a51-314d-41e9-af68-4a414af8c62d-kube-api-access-m27mr\") pod \"70be8a51-314d-41e9-af68-4a414af8c62d\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.398796 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-utilities\") pod \"70be8a51-314d-41e9-af68-4a414af8c62d\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.398876 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-catalog-content\") pod \"70be8a51-314d-41e9-af68-4a414af8c62d\" (UID: \"70be8a51-314d-41e9-af68-4a414af8c62d\") " Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.399718 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-utilities" (OuterVolumeSpecName: "utilities") pod "70be8a51-314d-41e9-af68-4a414af8c62d" (UID: "70be8a51-314d-41e9-af68-4a414af8c62d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.418489 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70be8a51-314d-41e9-af68-4a414af8c62d-kube-api-access-m27mr" (OuterVolumeSpecName: "kube-api-access-m27mr") pod "70be8a51-314d-41e9-af68-4a414af8c62d" (UID: "70be8a51-314d-41e9-af68-4a414af8c62d"). InnerVolumeSpecName "kube-api-access-m27mr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.451942 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70be8a51-314d-41e9-af68-4a414af8c62d" (UID: "70be8a51-314d-41e9-af68-4a414af8c62d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.499455 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.499488 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m27mr\" (UniqueName: \"kubernetes.io/projected/70be8a51-314d-41e9-af68-4a414af8c62d-kube-api-access-m27mr\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.499500 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70be8a51-314d-41e9-af68-4a414af8c62d-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.689322 4816 generic.go:334] "Generic (PLEG): container finished" podID="70be8a51-314d-41e9-af68-4a414af8c62d" containerID="6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423" exitCode=0 Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.689390 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qmhxp" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.689402 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qmhxp" event={"ID":"70be8a51-314d-41e9-af68-4a414af8c62d","Type":"ContainerDied","Data":"6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423"} Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.689892 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qmhxp" event={"ID":"70be8a51-314d-41e9-af68-4a414af8c62d","Type":"ContainerDied","Data":"f5b51cbc026f8756b0ff20e052b3883c018bbba8ebf4cc5fa358d454de3c78f9"} Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.689915 4816 scope.go:117] "RemoveContainer" containerID="6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.693399 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n6nmx" event={"ID":"504db8d2-a7d7-4484-9426-6426503b9df3","Type":"ContainerStarted","Data":"e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4"} Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.709726 4816 scope.go:117] "RemoveContainer" containerID="8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.711860 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-n6nmx" podStartSLOduration=2.570364229 podStartE2EDuration="57.711842212s" podCreationTimestamp="2026-02-16 13:05:52 +0000 UTC" firstStartedPulling="2026-02-16 13:05:54.065239469 +0000 UTC m=+153.391953197" lastFinishedPulling="2026-02-16 13:06:49.206717452 +0000 UTC m=+208.533431180" observedRunningTime="2026-02-16 13:06:49.711123081 +0000 UTC m=+209.037836809" watchObservedRunningTime="2026-02-16 13:06:49.711842212 +0000 UTC m=+209.038555940" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.727765 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qmhxp"] Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.731070 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qmhxp"] Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.738412 4816 scope.go:117] "RemoveContainer" containerID="8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.754029 4816 scope.go:117] "RemoveContainer" containerID="6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423" Feb 16 13:06:49 crc kubenswrapper[4816]: E0216 13:06:49.754750 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423\": container with ID starting with 6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423 not found: ID does not exist" containerID="6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.754782 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423"} err="failed to get container status \"6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423\": rpc error: code = 
NotFound desc = could not find container \"6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423\": container with ID starting with 6531d622fde9c5a93f9f33c303ba94e9d312be217f1c9952089af64ba128e423 not found: ID does not exist" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.754838 4816 scope.go:117] "RemoveContainer" containerID="8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c" Feb 16 13:06:49 crc kubenswrapper[4816]: E0216 13:06:49.755331 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c\": container with ID starting with 8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c not found: ID does not exist" containerID="8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.755358 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c"} err="failed to get container status \"8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c\": rpc error: code = NotFound desc = could not find container \"8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c\": container with ID starting with 8df6e2908b2d0d8e1d81bb856a72622ea37e6445a4dfe8d114d8c4b43636bf7c not found: ID does not exist" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.755373 4816 scope.go:117] "RemoveContainer" containerID="8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98" Feb 16 13:06:49 crc kubenswrapper[4816]: E0216 13:06:49.756089 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98\": container with ID starting with 8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98 not found: ID does not exist" containerID="8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98" Feb 16 13:06:49 crc kubenswrapper[4816]: I0216 13:06:49.756147 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98"} err="failed to get container status \"8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98\": rpc error: code = NotFound desc = could not find container \"8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98\": container with ID starting with 8d165be431319095560395f72cc4447599899fb5e4d8d3d75e8247ca03d1bb98 not found: ID does not exist" Feb 16 13:06:50 crc kubenswrapper[4816]: I0216 13:06:50.703787 4816 generic.go:334] "Generic (PLEG): container finished" podID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerID="d7763256c7a04cf1ddafe5af5d4738b13d7bfe4e84d456150136ac282ca20665" exitCode=0 Feb 16 13:06:50 crc kubenswrapper[4816]: I0216 13:06:50.703867 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89gd5" event={"ID":"19812a43-b6b0-45dc-9c35-b3c787fd4ff1","Type":"ContainerDied","Data":"d7763256c7a04cf1ddafe5af5d4738b13d7bfe4e84d456150136ac282ca20665"} Feb 16 13:06:50 crc kubenswrapper[4816]: I0216 13:06:50.708961 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hz29" 
event={"ID":"c7772346-3c28-4beb-9ddf-f6270c0cdabe","Type":"ContainerStarted","Data":"620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95"} Feb 16 13:06:51 crc kubenswrapper[4816]: I0216 13:06:51.405467 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" path="/var/lib/kubelet/pods/70be8a51-314d-41e9-af68-4a414af8c62d/volumes" Feb 16 13:06:51 crc kubenswrapper[4816]: I0216 13:06:51.716517 4816 generic.go:334] "Generic (PLEG): container finished" podID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerID="620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95" exitCode=0 Feb 16 13:06:51 crc kubenswrapper[4816]: I0216 13:06:51.716573 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hz29" event={"ID":"c7772346-3c28-4beb-9ddf-f6270c0cdabe","Type":"ContainerDied","Data":"620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95"} Feb 16 13:06:51 crc kubenswrapper[4816]: I0216 13:06:51.719963 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89gd5" event={"ID":"19812a43-b6b0-45dc-9c35-b3c787fd4ff1","Type":"ContainerStarted","Data":"cf7e4d12ed49ab100fdc7da5e26468b3c5f25162474ea59585a0c448ccfd7807"} Feb 16 13:06:51 crc kubenswrapper[4816]: I0216 13:06:51.749234 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-89gd5" podStartSLOduration=2.744770676 podStartE2EDuration="59.749214688s" podCreationTimestamp="2026-02-16 13:05:52 +0000 UTC" firstStartedPulling="2026-02-16 13:05:54.06816433 +0000 UTC m=+153.394878058" lastFinishedPulling="2026-02-16 13:06:51.072608332 +0000 UTC m=+210.399322070" observedRunningTime="2026-02-16 13:06:51.745451224 +0000 UTC m=+211.072164952" watchObservedRunningTime="2026-02-16 13:06:51.749214688 +0000 UTC m=+211.075928426" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.530981 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.531042 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.568470 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.726166 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hz29" event={"ID":"c7772346-3c28-4beb-9ddf-f6270c0cdabe","Type":"ContainerStarted","Data":"7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614"} Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.739878 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.739941 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.752013 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5hz29" podStartSLOduration=2.798829209 podStartE2EDuration="57.751993149s" podCreationTimestamp="2026-02-16 13:05:55 +0000 UTC" firstStartedPulling="2026-02-16 
13:05:57.167895143 +0000 UTC m=+156.494608871" lastFinishedPulling="2026-02-16 13:06:52.121059063 +0000 UTC m=+211.447772811" observedRunningTime="2026-02-16 13:06:52.751469795 +0000 UTC m=+212.078183523" watchObservedRunningTime="2026-02-16 13:06:52.751993149 +0000 UTC m=+212.078706877" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.790944 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.920787 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.920839 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:06:52 crc kubenswrapper[4816]: I0216 13:06:52.963305 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:06:53 crc kubenswrapper[4816]: I0216 13:06:53.778547 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-89gd5" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="registry-server" probeResult="failure" output=< Feb 16 13:06:53 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 13:06:53 crc kubenswrapper[4816]: > Feb 16 13:06:54 crc kubenswrapper[4816]: I0216 13:06:54.739107 4816 generic.go:334] "Generic (PLEG): container finished" podID="793c1b16-a031-4e64-8874-03cf983d16b5" containerID="c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458" exitCode=0 Feb 16 13:06:54 crc kubenswrapper[4816]: I0216 13:06:54.739193 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsvtk" event={"ID":"793c1b16-a031-4e64-8874-03cf983d16b5","Type":"ContainerDied","Data":"c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458"} Feb 16 13:06:56 crc kubenswrapper[4816]: I0216 13:06:56.156765 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:06:56 crc kubenswrapper[4816]: I0216 13:06:56.157034 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:06:57 crc kubenswrapper[4816]: I0216 13:06:57.191265 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5hz29" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="registry-server" probeResult="failure" output=< Feb 16 13:06:57 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 13:06:57 crc kubenswrapper[4816]: > Feb 16 13:07:00 crc kubenswrapper[4816]: I0216 13:07:00.788959 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsvtk" event={"ID":"793c1b16-a031-4e64-8874-03cf983d16b5","Type":"ContainerStarted","Data":"847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47"} Feb 16 13:07:00 crc kubenswrapper[4816]: I0216 13:07:00.792129 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjhmn" event={"ID":"e359a80a-04b9-4544-85b7-b51c74bbef61","Type":"ContainerStarted","Data":"b547f6bec90a1d0f666814ce7d3324e9996d95ff0c42ae565634475efc87d784"} Feb 16 13:07:00 crc kubenswrapper[4816]: I0216 13:07:00.813251 4816 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nsvtk" podStartSLOduration=3.0990158230000002 podStartE2EDuration="1m8.813233883s" podCreationTimestamp="2026-02-16 13:05:52 +0000 UTC" firstStartedPulling="2026-02-16 13:05:54.07605458 +0000 UTC m=+153.402768308" lastFinishedPulling="2026-02-16 13:06:59.7902726 +0000 UTC m=+219.116986368" observedRunningTime="2026-02-16 13:07:00.812568654 +0000 UTC m=+220.139282422" watchObservedRunningTime="2026-02-16 13:07:00.813233883 +0000 UTC m=+220.139947611" Feb 16 13:07:01 crc kubenswrapper[4816]: I0216 13:07:01.800712 4816 generic.go:334] "Generic (PLEG): container finished" podID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerID="b547f6bec90a1d0f666814ce7d3324e9996d95ff0c42ae565634475efc87d784" exitCode=0 Feb 16 13:07:01 crc kubenswrapper[4816]: I0216 13:07:01.800809 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjhmn" event={"ID":"e359a80a-04b9-4544-85b7-b51c74bbef61","Type":"ContainerDied","Data":"b547f6bec90a1d0f666814ce7d3324e9996d95ff0c42ae565634475efc87d784"} Feb 16 13:07:02 crc kubenswrapper[4816]: I0216 13:07:02.777081 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:07:02 crc kubenswrapper[4816]: I0216 13:07:02.808202 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjhmn" event={"ID":"e359a80a-04b9-4544-85b7-b51c74bbef61","Type":"ContainerStarted","Data":"c4380e54e80a746059752fb8a240adda02c69160cdcd45d052efb9e3ec72a6d7"} Feb 16 13:07:02 crc kubenswrapper[4816]: I0216 13:07:02.818136 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:07:02 crc kubenswrapper[4816]: I0216 13:07:02.827683 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wjhmn" podStartSLOduration=2.657424409 podStartE2EDuration="1m7.827641951s" podCreationTimestamp="2026-02-16 13:05:55 +0000 UTC" firstStartedPulling="2026-02-16 13:05:57.145298705 +0000 UTC m=+156.472012433" lastFinishedPulling="2026-02-16 13:07:02.315516237 +0000 UTC m=+221.642229975" observedRunningTime="2026-02-16 13:07:02.824044311 +0000 UTC m=+222.150758059" watchObservedRunningTime="2026-02-16 13:07:02.827641951 +0000 UTC m=+222.154355679" Feb 16 13:07:02 crc kubenswrapper[4816]: I0216 13:07:02.968918 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:07:03 crc kubenswrapper[4816]: I0216 13:07:03.169851 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:07:03 crc kubenswrapper[4816]: I0216 13:07:03.170455 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:07:03 crc kubenswrapper[4816]: I0216 13:07:03.215173 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nsvtk" Feb 16 13:07:05 crc kubenswrapper[4816]: I0216 13:07:05.752809 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:07:05 crc kubenswrapper[4816]: I0216 13:07:05.752854 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.213974 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.249823 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5hz29" Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.788670 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" podUID="f850653e-8cb2-44e3-9ef5-bbba590bbf9c" containerName="oauth-openshift" containerID="cri-o://986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a" gracePeriod=15 Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.807996 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wjhmn" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="registry-server" probeResult="failure" output=< Feb 16 13:07:06 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 13:07:06 crc kubenswrapper[4816]: > Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.941250 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.941316 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.941362 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.941972 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:07:06 crc kubenswrapper[4816]: I0216 13:07:06.942029 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352" gracePeriod=600 Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.032419 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n6nmx"] Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.032671 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-n6nmx" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="registry-server" containerID="cri-o://e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4" 
gracePeriod=2 Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.270605 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308236 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-f54c45747-7ltv2"] Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.308449 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="extract-utilities" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308461 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="extract-utilities" Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.308470 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f6f4a14-7612-4aef-b42e-a43546f2421e" containerName="pruner" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308476 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f6f4a14-7612-4aef-b42e-a43546f2421e" containerName="pruner" Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.308486 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="extract-content" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308492 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="extract-content" Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.308500 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f850653e-8cb2-44e3-9ef5-bbba590bbf9c" containerName="oauth-openshift" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308506 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f850653e-8cb2-44e3-9ef5-bbba590bbf9c" containerName="oauth-openshift" Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.308518 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="registry-server" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308525 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="registry-server" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308634 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f850653e-8cb2-44e3-9ef5-bbba590bbf9c" containerName="oauth-openshift" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308645 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="70be8a51-314d-41e9-af68-4a414af8c62d" containerName="registry-server" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.308671 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f6f4a14-7612-4aef-b42e-a43546f2421e" containerName="pruner" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.309035 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.318509 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f54c45747-7ltv2"] Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.433907 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-policies\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.433971 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-provider-selection\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.433993 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-ocp-branding-template\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434013 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-idp-0-file-data\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434047 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-login\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434092 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-dir\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434114 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr4hx\" (UniqueName: \"kubernetes.io/projected/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-kube-api-access-pr4hx\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434145 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-cliconfig\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434168 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-serving-cert\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434197 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-router-certs\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434252 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-error\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434332 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-trusted-ca-bundle\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434368 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-session\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434398 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-service-ca\") pod \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\" (UID: \"f850653e-8cb2-44e3-9ef5-bbba590bbf9c\") " Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434602 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434637 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-session\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434695 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 
13:07:07.434761 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434793 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-login\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434828 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-router-certs\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434856 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-audit-policies\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434887 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434916 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-error\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434937 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cn6n\" (UniqueName: \"kubernetes.io/projected/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-kube-api-access-6cn6n\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434963 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " 
pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.434988 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-service-ca\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.435011 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-audit-dir\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.435037 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.436137 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.436312 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.436599 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.437511 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.438306 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.442465 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.445897 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.456218 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.456520 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-kube-api-access-pr4hx" (OuterVolumeSpecName: "kube-api-access-pr4hx") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "kube-api-access-pr4hx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.456798 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.456935 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.459051 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.460627 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.460820 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "f850653e-8cb2-44e3-9ef5-bbba590bbf9c" (UID: "f850653e-8cb2-44e3-9ef5-bbba590bbf9c"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.510675 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n6nmx" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.535779 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536036 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-login\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536119 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-router-certs\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536199 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-audit-policies\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536293 
4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536374 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-error\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536447 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cn6n\" (UniqueName: \"kubernetes.io/projected/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-kube-api-access-6cn6n\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536522 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536593 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-service-ca\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536696 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-audit-dir\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536774 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536869 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.536963 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-session\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537073 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537181 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537251 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537318 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537397 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537464 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537524 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537587 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537671 4816 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537721 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " 
pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537736 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537794 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537808 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537818 4816 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537830 4816 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.537840 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr4hx\" (UniqueName: \"kubernetes.io/projected/f850653e-8cb2-44e3-9ef5-bbba590bbf9c-kube-api-access-pr4hx\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.540068 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.540464 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-audit-dir\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.540745 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-audit-policies\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.540798 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 
13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.560276 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-service-ca\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.562174 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-error\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.562538 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-template-login\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.562842 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-session\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.563641 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.564778 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-router-certs\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.566329 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.567521 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.570211 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cn6n\" (UniqueName: \"kubernetes.io/projected/ebde8bc6-61be-46dd-a9ae-e8441f00b57e-kube-api-access-6cn6n\") pod \"oauth-openshift-f54c45747-7ltv2\" (UID: \"ebde8bc6-61be-46dd-a9ae-e8441f00b57e\") " pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.627851 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.638310 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-catalog-content\") pod \"504db8d2-a7d7-4484-9426-6426503b9df3\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") "
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.638356 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-utilities\") pod \"504db8d2-a7d7-4484-9426-6426503b9df3\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") "
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.638394 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4b9q8\" (UniqueName: \"kubernetes.io/projected/504db8d2-a7d7-4484-9426-6426503b9df3-kube-api-access-4b9q8\") pod \"504db8d2-a7d7-4484-9426-6426503b9df3\" (UID: \"504db8d2-a7d7-4484-9426-6426503b9df3\") "
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.639141 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-utilities" (OuterVolumeSpecName: "utilities") pod "504db8d2-a7d7-4484-9426-6426503b9df3" (UID: "504db8d2-a7d7-4484-9426-6426503b9df3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.641685 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/504db8d2-a7d7-4484-9426-6426503b9df3-kube-api-access-4b9q8" (OuterVolumeSpecName: "kube-api-access-4b9q8") pod "504db8d2-a7d7-4484-9426-6426503b9df3" (UID: "504db8d2-a7d7-4484-9426-6426503b9df3"). InnerVolumeSpecName "kube-api-access-4b9q8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.689791 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "504db8d2-a7d7-4484-9426-6426503b9df3" (UID: "504db8d2-a7d7-4484-9426-6426503b9df3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.739884 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.740184 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/504db8d2-a7d7-4484-9426-6426503b9df3-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.740200 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4b9q8\" (UniqueName: \"kubernetes.io/projected/504db8d2-a7d7-4484-9426-6426503b9df3-kube-api-access-4b9q8\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.844364 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352" exitCode=0
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.844425 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352"}
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.844452 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"a405cb39ec542f11b1ee9ee44ef19e54fb2c4eb861717d61b9255a4307d36e29"}
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.845998 4816 generic.go:334] "Generic (PLEG): container finished" podID="f850653e-8cb2-44e3-9ef5-bbba590bbf9c" containerID="986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a" exitCode=0
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.846116 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.846571 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" event={"ID":"f850653e-8cb2-44e3-9ef5-bbba590bbf9c","Type":"ContainerDied","Data":"986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a"}
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.846598 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-brmt2" event={"ID":"f850653e-8cb2-44e3-9ef5-bbba590bbf9c","Type":"ContainerDied","Data":"27bf0bb4c7a7d5a18b5cbba874da19c784ab3dcc1641b04a46a73967c041c409"}
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.846617 4816 scope.go:117] "RemoveContainer" containerID="986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.854980 4816 generic.go:334] "Generic (PLEG): container finished" podID="504db8d2-a7d7-4484-9426-6426503b9df3" containerID="e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4" exitCode=0
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.855031 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n6nmx" event={"ID":"504db8d2-a7d7-4484-9426-6426503b9df3","Type":"ContainerDied","Data":"e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4"}
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.855059 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n6nmx" event={"ID":"504db8d2-a7d7-4484-9426-6426503b9df3","Type":"ContainerDied","Data":"47e13549ee89a563457a7ba3f611a8d4e44ca26a9ee5d27fac35308b46458a11"}
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.855123 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n6nmx"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.880958 4816 scope.go:117] "RemoveContainer" containerID="986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a"
Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.881346 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a\": container with ID starting with 986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a not found: ID does not exist" containerID="986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.881383 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a"} err="failed to get container status \"986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a\": rpc error: code = NotFound desc = could not find container \"986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a\": container with ID starting with 986dc427c3a6efbd367568d1eefa8189b55d6439b70da8f2c5e413ce2418a98a not found: ID does not exist"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.881409 4816 scope.go:117] "RemoveContainer" containerID="e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.890568 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-brmt2"]
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.899517 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-brmt2"]
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.907381 4816 scope.go:117] "RemoveContainer" containerID="4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.917578 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n6nmx"]
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.921343 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-n6nmx"]
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.956938 4816 scope.go:117] "RemoveContainer" containerID="fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.974199 4816 scope.go:117] "RemoveContainer" containerID="e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4"
Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.974642 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4\": container with ID starting with e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4 not found: ID does not exist" containerID="e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.974700 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4"} err="failed to get container status \"e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4\": rpc error: code = NotFound desc = could not find container \"e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4\": container with ID starting with e9ff1a434c058708b750820e3f6628b058f1691d853abf6c142e21b89c33c0e4 not found: ID does not exist"
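Every entry above shares one shape: a journald prefix ("Feb 16 13:07:07 crc kubenswrapper[4816]:") followed by a klog header (severity letter I/W/E, MMDD date, wall-clock time, PID, source file:line) and the message. A minimal parsing sketch under that assumption; the regex and field names are illustrative, inferred from these entries rather than from any official kubelet format:

    import re
    import sys

    # Journald prefix + klog header, as seen in the entries above.
    KLOG = re.compile(
        r'^(?P<stamp>\w{3} +\d+ \d{2}:\d{2}:\d{2}) (?P<host>\S+) kubenswrapper\[\d+\]: '
        r'(?P<sev>[IWEF])\d{4} (?P<time>\d{2}:\d{2}:\d{2}\.\d+) +\d+ '
        r'(?P<src>\w+\.go:\d+)\] (?P<msg>.*)$'
    )

    def parse(line):
        """Return a dict of fields for one kubelet entry, or None for
        truncated fragments (like the first and last lines of this excerpt)."""
        m = KLOG.match(line.rstrip('\n'))
        return m.groupdict() if m else None

    if __name__ == '__main__':
        for raw in sys.stdin:
            rec = parse(raw)
            if rec:
                print(rec['sev'], rec['time'], rec['src'], rec['msg'][:100])

The helpers sketched after the later sections all assume lines have already been split one entry per line, as done here.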
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.974728 4816 scope.go:117] "RemoveContainer" containerID="4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca"
Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.974976 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca\": container with ID starting with 4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca not found: ID does not exist" containerID="4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.975006 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca"} err="failed to get container status \"4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca\": rpc error: code = NotFound desc = could not find container \"4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca\": container with ID starting with 4b94d55ea2c73620f610c3945acd6ece952a45c3405f72903b9c17ba9d2df6ca not found: ID does not exist"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.975024 4816 scope.go:117] "RemoveContainer" containerID="fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a"
Feb 16 13:07:07 crc kubenswrapper[4816]: E0216 13:07:07.975276 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a\": container with ID starting with fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a not found: ID does not exist" containerID="fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a"
Feb 16 13:07:07 crc kubenswrapper[4816]: I0216 13:07:07.975316 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a"} err="failed to get container status \"fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a\": rpc error: code = NotFound desc = could not find container \"fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a\": container with ID starting with fa01e2de5734f1f6bb4b56764896d4978f83da3e55ab01b2a364ccf2e171e26a not found: ID does not exist"
Feb 16 13:07:08 crc kubenswrapper[4816]: I0216 13:07:08.109083 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f54c45747-7ltv2"]
Feb 16 13:07:08 crc kubenswrapper[4816]: W0216 13:07:08.114941 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebde8bc6_61be_46dd_a9ae_e8441f00b57e.slice/crio-22c91d00ebe1fd8ff7b8c88eb1aa4a1f4c43dbc4063d17794e43cf47e2967946 WatchSource:0}: Error finding container 22c91d00ebe1fd8ff7b8c88eb1aa4a1f4c43dbc4063d17794e43cf47e2967946: Status 404 returned error can't find the container with id 22c91d00ebe1fd8ff7b8c88eb1aa4a1f4c43dbc4063d17794e43cf47e2967946
Feb 16 13:07:08 crc kubenswrapper[4816]: I0216 13:07:08.868221 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" event={"ID":"ebde8bc6-61be-46dd-a9ae-e8441f00b57e","Type":"ContainerStarted","Data":"098708b9ce906e930dd780440b119419832169a720bbcb38cac8a999cc9018b8"}
Feb 16 13:07:08 crc kubenswrapper[4816]: I0216 13:07:08.868586 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" event={"ID":"ebde8bc6-61be-46dd-a9ae-e8441f00b57e","Type":"ContainerStarted","Data":"22c91d00ebe1fd8ff7b8c88eb1aa4a1f4c43dbc4063d17794e43cf47e2967946"}
Feb 16 13:07:08 crc kubenswrapper[4816]: I0216 13:07:08.868608 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2"
Feb 16 13:07:08 crc kubenswrapper[4816]: I0216 13:07:08.874902 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2"
Feb 16 13:07:08 crc kubenswrapper[4816]: I0216 13:07:08.898041 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-f54c45747-7ltv2" podStartSLOduration=27.898014921 podStartE2EDuration="27.898014921s" podCreationTimestamp="2026-02-16 13:06:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:07:08.891417807 +0000 UTC m=+228.218131555" watchObservedRunningTime="2026-02-16 13:07:08.898014921 +0000 UTC m=+228.224728679"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.230847 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5hz29"]
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.231096 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5hz29" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="registry-server" containerID="cri-o://7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614" gracePeriod=2
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.255987 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"]
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.256554 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" podUID="65ccdb11-3222-41c2-96ae-6bb14c648950" containerName="controller-manager" containerID="cri-o://5e9caaf586a372f46499d4185d495d05700d73681aecdf8f081b7a035d358ecd" gracePeriod=30
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.357703 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"]
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.357950 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" podUID="ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" containerName="route-controller-manager" containerID="cri-o://eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea" gracePeriod=30
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.405431 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" path="/var/lib/kubelet/pods/504db8d2-a7d7-4484-9426-6426503b9df3/volumes"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.406437 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f850653e-8cb2-44e3-9ef5-bbba590bbf9c" path="/var/lib/kubelet/pods/f850653e-8cb2-44e3-9ef5-bbba590bbf9c/volumes"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.781585 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5hz29"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.812828 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.874956 4816 generic.go:334] "Generic (PLEG): container finished" podID="65ccdb11-3222-41c2-96ae-6bb14c648950" containerID="5e9caaf586a372f46499d4185d495d05700d73681aecdf8f081b7a035d358ecd" exitCode=0
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.875016 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" event={"ID":"65ccdb11-3222-41c2-96ae-6bb14c648950","Type":"ContainerDied","Data":"5e9caaf586a372f46499d4185d495d05700d73681aecdf8f081b7a035d358ecd"}
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.876450 4816 generic.go:334] "Generic (PLEG): container finished" podID="ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" containerID="eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea" exitCode=0
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.876492 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" event={"ID":"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10","Type":"ContainerDied","Data":"eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea"}
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.876510 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw" event={"ID":"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10","Type":"ContainerDied","Data":"139aa568b314232d50ded7572e43bb01393cb52961b9ec5e800a582e169f3589"}
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.876532 4816 scope.go:117] "RemoveContainer" containerID="eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.876616 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"
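Among the entries above, pod_startup_latency_tracker.go:104 reports both the SLO-relevant and end-to-end startup durations for oauth-openshift-f54c45747-7ltv2 (about 27.9 s from creation to observed running). A sketch that collects those figures across a log, keyed on the fields actually present in these entries:

    import re

    # Fields as they appear in the "Observed pod startup duration" entries.
    SLO = re.compile(
        r'"Observed pod startup duration" pod="(?P<pod>[^"]+)" '
        r'podStartSLOduration=(?P<slo>[\d.]+) podStartE2EDuration="(?P<e2e>[\d.]+)s"'
    )

    def startup_durations(lines):
        """Map pod -> (SLO duration, end-to-end duration) in seconds."""
        out = {}
        for line in lines:
            m = SLO.search(line)
            if m:
                out[m.group('pod')] = (float(m.group('slo')), float(m.group('e2e')))
        return out

Applied to this excerpt it would yield 27.898 s for the oauth pod and, further down, roughly 4.94 s and 5.01 s for the two replacement controller-manager pods.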
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.879847 4816 generic.go:334] "Generic (PLEG): container finished" podID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerID="7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614" exitCode=0
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.880487 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hz29" event={"ID":"c7772346-3c28-4beb-9ddf-f6270c0cdabe","Type":"ContainerDied","Data":"7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614"}
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.880522 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hz29" event={"ID":"c7772346-3c28-4beb-9ddf-f6270c0cdabe","Type":"ContainerDied","Data":"2c72abae1f5239ad054153ddfd15db22f7a8f2b553303d7baf1edbed83efba66"}
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.880536 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5hz29"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.891477 4816 scope.go:117] "RemoveContainer" containerID="eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea"
Feb 16 13:07:09 crc kubenswrapper[4816]: E0216 13:07:09.891857 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea\": container with ID starting with eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea not found: ID does not exist" containerID="eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.891922 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea"} err="failed to get container status \"eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea\": rpc error: code = NotFound desc = could not find container \"eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea\": container with ID starting with eea4b434447a84288a245cc65c380058c2df3b57021c307de5bae0d5b40949ea not found: ID does not exist"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.891946 4816 scope.go:117] "RemoveContainer" containerID="7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.894285 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-catalog-content\") pod \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") "
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.894850 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-serving-cert\") pod \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") "
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.894887 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-utilities\") pod \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") "
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.894917 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz8zp\" (UniqueName: \"kubernetes.io/projected/c7772346-3c28-4beb-9ddf-f6270c0cdabe-kube-api-access-jz8zp\") pod \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\" (UID: \"c7772346-3c28-4beb-9ddf-f6270c0cdabe\") "
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.894940 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-config\") pod \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") "
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.897096 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-config" (OuterVolumeSpecName: "config") pod "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" (UID: "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.897210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-utilities" (OuterVolumeSpecName: "utilities") pod "c7772346-3c28-4beb-9ddf-f6270c0cdabe" (UID: "c7772346-3c28-4beb-9ddf-f6270c0cdabe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.901096 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" (UID: "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.903615 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7772346-3c28-4beb-9ddf-f6270c0cdabe-kube-api-access-jz8zp" (OuterVolumeSpecName: "kube-api-access-jz8zp") pod "c7772346-3c28-4beb-9ddf-f6270c0cdabe" (UID: "c7772346-3c28-4beb-9ddf-f6270c0cdabe"). InnerVolumeSpecName "kube-api-access-jz8zp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.909924 4816 scope.go:117] "RemoveContainer" containerID="620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.930199 4816 scope.go:117] "RemoveContainer" containerID="6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.933972 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.944098 4816 scope.go:117] "RemoveContainer" containerID="7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614"
Feb 16 13:07:09 crc kubenswrapper[4816]: E0216 13:07:09.944481 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614\": container with ID starting with 7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614 not found: ID does not exist" containerID="7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.944515 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614"} err="failed to get container status \"7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614\": rpc error: code = NotFound desc = could not find container \"7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614\": container with ID starting with 7a149860a0d1f82b200925de830838f205e15a4fbe569ffee3dd8d0e5f255614 not found: ID does not exist"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.944536 4816 scope.go:117] "RemoveContainer" containerID="620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95"
Feb 16 13:07:09 crc kubenswrapper[4816]: E0216 13:07:09.944815 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95\": container with ID starting with 620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95 not found: ID does not exist" containerID="620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.944844 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95"} err="failed to get container status \"620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95\": rpc error: code = NotFound desc = could not find container \"620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95\": container with ID starting with 620604ce47348921bedbc51a0d49864a1ef76a5f7bad06c607f185e41e287e95 not found: ID does not exist"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.944861 4816 scope.go:117] "RemoveContainer" containerID="6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399"
Feb 16 13:07:09 crc kubenswrapper[4816]: E0216 13:07:09.945429 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399\": container with ID starting with 6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399 not found: ID does not exist" containerID="6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.945449 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399"} err="failed to get container status \"6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399\": rpc error: code = NotFound desc = could not find container \"6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399\": container with ID starting with 6f1ee866a1d46244cee37cdff18c4a6dd3d9b8c44b89b912e7a4ccc7437e7399 not found: ID does not exist"
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.996160 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-client-ca\") pod \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") "
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.996205 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5l9w\" (UniqueName: \"kubernetes.io/projected/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-kube-api-access-p5l9w\") pod \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\" (UID: \"ae2f26b7-0406-4928-9c24-fc9d3a2c6b10\") "
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.996511 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.996535 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.996549 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz8zp\" (UniqueName: \"kubernetes.io/projected/c7772346-3c28-4beb-9ddf-f6270c0cdabe-kube-api-access-jz8zp\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.996560 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.996753 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-client-ca" (OuterVolumeSpecName: "client-ca") pod "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" (UID: "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:07:09 crc kubenswrapper[4816]: I0216 13:07:09.999420 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-kube-api-access-p5l9w" (OuterVolumeSpecName: "kube-api-access-p5l9w") pod "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" (UID: "ae2f26b7-0406-4928-9c24-fc9d3a2c6b10"). InnerVolumeSpecName "kube-api-access-p5l9w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.029588 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c7772346-3c28-4beb-9ddf-f6270c0cdabe" (UID: "c7772346-3c28-4beb-9ddf-f6270c0cdabe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
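The teardown entries above come in three stages per volume: the reconciler's "operationExecutor.UnmountVolume started", the operation generator's "UnmountVolume.TearDown succeeded", and finally "Volume detached ... DevicePath". A small sketch that tracks how far each UniqueName progressed, which can expose volumes that never reach "detached"; the markers and regex are inferred from these entries only:

    import re
    from collections import defaultdict

    # The first quoted kubernetes.io/... path on a line identifies the volume;
    # the quote may be backslash-escaped inside structured messages.
    UNIQ = re.compile(r'\\?"(kubernetes\.io/[^"\\]+)\\?"')
    STAGES = [
        ('unmount-started', 'operationExecutor.UnmountVolume started'),
        ('teardown-ok', 'UnmountVolume.TearDown succeeded'),
        ('detached', 'Volume detached for volume'),
    ]

    def teardown_progress(lines):
        """Map volume UniqueName -> set of teardown stages observed for it."""
        seen = defaultdict(set)
        for line in lines:
            for stage, marker in STAGES:
                if marker in line:
                    m = UNIQ.search(line)
                    if m:
                        seen[m.group(1)].add(stage)
        return seen

    def stuck(lines):
        """Volumes that started unmounting but were never reported detached."""
        return [v for v, s in teardown_progress(lines).items() if 'detached' not in s]

In this excerpt every volume of the three deleted pods reaches all three stages within a few hundred milliseconds.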
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.097815 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-client-ca\") pod \"65ccdb11-3222-41c2-96ae-6bb14c648950\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") "
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.097897 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ccdb11-3222-41c2-96ae-6bb14c648950-serving-cert\") pod \"65ccdb11-3222-41c2-96ae-6bb14c648950\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") "
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.097922 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-config\") pod \"65ccdb11-3222-41c2-96ae-6bb14c648950\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") "
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.097943 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5wcv\" (UniqueName: \"kubernetes.io/projected/65ccdb11-3222-41c2-96ae-6bb14c648950-kube-api-access-j5wcv\") pod \"65ccdb11-3222-41c2-96ae-6bb14c648950\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") "
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.098026 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-proxy-ca-bundles\") pod \"65ccdb11-3222-41c2-96ae-6bb14c648950\" (UID: \"65ccdb11-3222-41c2-96ae-6bb14c648950\") "
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.098175 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-client-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.098457 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5l9w\" (UniqueName: \"kubernetes.io/projected/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10-kube-api-access-p5l9w\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.098470 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7772346-3c28-4beb-9ddf-f6270c0cdabe-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.098571 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-client-ca" (OuterVolumeSpecName: "client-ca") pod "65ccdb11-3222-41c2-96ae-6bb14c648950" (UID: "65ccdb11-3222-41c2-96ae-6bb14c648950"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.098906 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "65ccdb11-3222-41c2-96ae-6bb14c648950" (UID: "65ccdb11-3222-41c2-96ae-6bb14c648950"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.099358 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-config" (OuterVolumeSpecName: "config") pod "65ccdb11-3222-41c2-96ae-6bb14c648950" (UID: "65ccdb11-3222-41c2-96ae-6bb14c648950"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.101111 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65ccdb11-3222-41c2-96ae-6bb14c648950-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "65ccdb11-3222-41c2-96ae-6bb14c648950" (UID: "65ccdb11-3222-41c2-96ae-6bb14c648950"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.101351 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65ccdb11-3222-41c2-96ae-6bb14c648950-kube-api-access-j5wcv" (OuterVolumeSpecName: "kube-api-access-j5wcv") pod "65ccdb11-3222-41c2-96ae-6bb14c648950" (UID: "65ccdb11-3222-41c2-96ae-6bb14c648950"). InnerVolumeSpecName "kube-api-access-j5wcv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.199270 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-client-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.199332 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65ccdb11-3222-41c2-96ae-6bb14c648950-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.199345 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.199354 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5wcv\" (UniqueName: \"kubernetes.io/projected/65ccdb11-3222-41c2-96ae-6bb14c648950-kube-api-access-j5wcv\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.199364 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65ccdb11-3222-41c2-96ae-6bb14c648950-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.212387 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"]
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.216151 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b456548cd-fqtgw"]
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.224470 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5hz29"]
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.227297 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5hz29"]
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.896781 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn" event={"ID":"65ccdb11-3222-41c2-96ae-6bb14c648950","Type":"ContainerDied","Data":"09843936222df934de061ff5a374a6aa090eb20d52ada54fd3d186b273d7aca0"}
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.896821 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.896852 4816 scope.go:117] "RemoveContainer" containerID="5e9caaf586a372f46499d4185d495d05700d73681aecdf8f081b7a035d358ecd"
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.958310 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"]
Feb 16 13:07:10 crc kubenswrapper[4816]: I0216 13:07:10.968438 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-764d98ff7d-qrwrn"]
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.276856 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"]
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277271 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="registry-server"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277296 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="registry-server"
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277327 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" containerName="route-controller-manager"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277345 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" containerName="route-controller-manager"
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277383 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="extract-content"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277402 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="extract-content"
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277432 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="extract-content"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277448 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="extract-content"
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277477 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="registry-server"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277495 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="registry-server"
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277518 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="extract-utilities"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277536 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="extract-utilities"
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277558 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65ccdb11-3222-41c2-96ae-6bb14c648950" containerName="controller-manager"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277574 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="65ccdb11-3222-41c2-96ae-6bb14c648950" containerName="controller-manager"
Feb 16 13:07:11 crc kubenswrapper[4816]: E0216 13:07:11.277606 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="extract-utilities"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277624 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="extract-utilities"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277914 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="65ccdb11-3222-41c2-96ae-6bb14c648950" containerName="controller-manager"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277953 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="504db8d2-a7d7-4484-9426-6426503b9df3" containerName="registry-server"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.277977 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" containerName="registry-server"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.278001 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" containerName="route-controller-manager"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.278753 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.280439 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"]
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.281518 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.281631 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
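Note the pattern in the container-ID error entries earlier in this excerpt: every E-level "ContainerStatus from runtime service failed ... code = NotFound" follows a "RemoveContainer" for the same ID, meaning the container was already gone when the kubelet re-checked, which is benign. A sketch that flags only NotFound errors lacking a prior RemoveContainer for that ID (those would deserve a closer look); the pairing heuristic is this note's own, not the kubelet's:

    import re

    CID = re.compile(r'containerID="(?:cri-o://)?([0-9a-f]{64})"')

    def unexplained_notfound(lines):
        """NotFound container-status errors with no earlier RemoveContainer."""
        removed, suspicious = set(), []
        for line in lines:
            m = CID.search(line)
            if not m:
                continue
            cid = m.group(1)
            if '"RemoveContainer"' in line:
                removed.add(cid)
            elif 'code = NotFound' in line and cid not in removed and cid not in suspicious:
                suspicious.append(cid)
        return suspicious

Run over this excerpt the list comes back empty: all six NotFound errors (986dc4..., e9ff1a..., 4b94d5..., fa01e2..., eea4b4..., 7a1498..., 620604..., 6f1ee8...) trace back to explicit removals.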
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.281890 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.282808 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.282864 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.284958 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.285637 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"]
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.286602 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.289269 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.289583 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.289648 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.289593 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.290056 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.290088 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.297853 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.301255 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"]
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.406773 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65ccdb11-3222-41c2-96ae-6bb14c648950" path="/var/lib/kubelet/pods/65ccdb11-3222-41c2-96ae-6bb14c648950/volumes"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.407527 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae2f26b7-0406-4928-9c24-fc9d3a2c6b10" path="/var/lib/kubelet/pods/ae2f26b7-0406-4928-9c24-fc9d3a2c6b10/volumes"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.408041 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7772346-3c28-4beb-9ddf-f6270c0cdabe" path="/var/lib/kubelet/pods/c7772346-3c28-4beb-9ddf-f6270c0cdabe/volumes"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.413257 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-config\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.413297 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78062d6d-e0f0-4659-add1-a4a4ea464c6f-serving-cert\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.413329 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f157dee-0cab-452b-8f32-1fd9e25ecd59-serving-cert\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.413632 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-proxy-ca-bundles\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.413695 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-config\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.414001 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8nr9\" (UniqueName: \"kubernetes.io/projected/78062d6d-e0f0-4659-add1-a4a4ea464c6f-kube-api-access-j8nr9\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.414044 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-client-ca\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.414074 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-client-ca\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.414145 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dth2\" (UniqueName: \"kubernetes.io/projected/8f157dee-0cab-452b-8f32-1fd9e25ecd59-kube-api-access-6dth2\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516039 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dth2\" (UniqueName: \"kubernetes.io/projected/8f157dee-0cab-452b-8f32-1fd9e25ecd59-kube-api-access-6dth2\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516137 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-config\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516182 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78062d6d-e0f0-4659-add1-a4a4ea464c6f-serving-cert\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516226 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f157dee-0cab-452b-8f32-1fd9e25ecd59-serving-cert\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516260 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-proxy-ca-bundles\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516366 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-config\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516406 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8nr9\" (UniqueName: \"kubernetes.io/projected/78062d6d-e0f0-4659-add1-a4a4ea464c6f-kube-api-access-j8nr9\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516438 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-client-ca\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.516477 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-client-ca\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.517594 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-proxy-ca-bundles\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.517987 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-client-ca\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.518364 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-client-ca\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.518763 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-config\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.519700 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-config\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.521600 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f157dee-0cab-452b-8f32-1fd9e25ecd59-serving-cert\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.524627 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78062d6d-e0f0-4659-add1-a4a4ea464c6f-serving-cert\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.539907 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dth2\" (UniqueName: \"kubernetes.io/projected/8f157dee-0cab-452b-8f32-1fd9e25ecd59-kube-api-access-6dth2\") pod \"controller-manager-5dcd76744d-dv4nh\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") " pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.542464 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8nr9\" (UniqueName: \"kubernetes.io/projected/78062d6d-e0f0-4659-add1-a4a4ea464c6f-kube-api-access-j8nr9\") pod \"route-controller-manager-78495b4c64-fftxb\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") " pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.614127 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:11 crc kubenswrapper[4816]: I0216 13:07:11.623983 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:12 crc kubenswrapper[4816]: I0216 13:07:12.027541 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"]
Feb 16 13:07:12 crc kubenswrapper[4816]: W0216 13:07:12.034325 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78062d6d_e0f0_4659_add1_a4a4ea464c6f.slice/crio-c2423d13e8409294128dedd60a41eda69646b0b926998c6e60523b5c42f9d7c3 WatchSource:0}: Error finding container c2423d13e8409294128dedd60a41eda69646b0b926998c6e60523b5c42f9d7c3: Status 404 returned error can't find the container with id c2423d13e8409294128dedd60a41eda69646b0b926998c6e60523b5c42f9d7c3
Feb 16 13:07:12 crc kubenswrapper[4816]: I0216 13:07:12.075794 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"]
Feb 16 13:07:12 crc kubenswrapper[4816]: W0216 13:07:12.082676 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f157dee_0cab_452b_8f32_1fd9e25ecd59.slice/crio-4469ffacf552d480d872a7ecd37953da133e2d507108ef85919b293b063d6c6c WatchSource:0}: Error finding container 4469ffacf552d480d872a7ecd37953da133e2d507108ef85919b293b063d6c6c: Status 404 returned error can't find the container with id 4469ffacf552d480d872a7ecd37953da133e2d507108ef85919b293b063d6c6c
Feb 16 13:07:12 crc kubenswrapper[4816]: I0216 13:07:12.913757 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh" event={"ID":"8f157dee-0cab-452b-8f32-1fd9e25ecd59","Type":"ContainerStarted","Data":"4469ffacf552d480d872a7ecd37953da133e2d507108ef85919b293b063d6c6c"}
Feb 16 13:07:12 crc kubenswrapper[4816]: I0216 13:07:12.915081 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb" event={"ID":"78062d6d-e0f0-4659-add1-a4a4ea464c6f","Type":"ContainerStarted","Data":"c2423d13e8409294128dedd60a41eda69646b0b926998c6e60523b5c42f9d7c3"}
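Bringing up the two replacement pods above follows the mirror image of the earlier teardown: "operationExecutor.VerifyControllerAttachedVolume started", then "operationExecutor.MountVolume started", then "MountVolume.SetUp succeeded", once per volume per pod. A sketch that records the furthest stage reached for every (pod, volume) pair, again inferring the markers from these entries only:

    import re
    from collections import defaultdict

    POD = re.compile(r'pod="([^"]+)"\s*$')
    VOL = re.compile(r'volume \\"([^"\\]+)\\"')
    STAGES = {  # each entry matches exactly one marker; later stages overwrite earlier
        'operationExecutor.VerifyControllerAttachedVolume started': 'attach-verified',
        'operationExecutor.MountVolume started': 'mount-started',
        'MountVolume.SetUp succeeded': 'mounted',
    }

    def mount_progress(lines):
        """Map pod -> {volume name: furthest mount stage seen}."""
        state = defaultdict(dict)
        for line in lines:
            for marker, stage in STAGES.items():
                if marker in line:
                    pod, vol = POD.search(line), VOL.search(line)
                    if pod and vol:
                        state[pod.group(1)][vol.group(1)] = stage
                    break
        return state

For this excerpt both controller-manager-5dcd76744d-dv4nh (6 volumes) and route-controller-manager-78495b4c64-fftxb (4 volumes) end up fully "mounted" within about 130 ms of the VerifyControllerAttachedVolume entries.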
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.244696 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nsvtk"
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.691334 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nsvtk"]
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.921841 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh" event={"ID":"8f157dee-0cab-452b-8f32-1fd9e25ecd59","Type":"ContainerStarted","Data":"c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40"}
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.922068 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.924033 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb" event={"ID":"78062d6d-e0f0-4659-add1-a4a4ea464c6f","Type":"ContainerStarted","Data":"cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b"}
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.924310 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.924502 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nsvtk" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="registry-server" containerID="cri-o://847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47" gracePeriod=2
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.932052 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:07:13 crc kubenswrapper[4816]: I0216 13:07:13.944731 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh" podStartSLOduration=4.944716618 podStartE2EDuration="4.944716618s" podCreationTimestamp="2026-02-16 13:07:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:07:13.942225688 +0000 UTC m=+233.268939416" watchObservedRunningTime="2026-02-16 13:07:13.944716618 +0000 UTC m=+233.271430346"
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.007267 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb" podStartSLOduration=5.007250045 podStartE2EDuration="5.007250045s" podCreationTimestamp="2026-02-16 13:07:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:07:14.003574563 +0000 UTC m=+233.330288291" watchObservedRunningTime="2026-02-16 13:07:14.007250045 +0000 UTC m=+233.333963773"
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.219940 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.262075 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsvtk"
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.378591 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-catalog-content\") pod \"793c1b16-a031-4e64-8874-03cf983d16b5\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") "
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.378730 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdxk6\" (UniqueName: \"kubernetes.io/projected/793c1b16-a031-4e64-8874-03cf983d16b5-kube-api-access-cdxk6\") pod \"793c1b16-a031-4e64-8874-03cf983d16b5\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") "
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.378771 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-utilities\") pod \"793c1b16-a031-4e64-8874-03cf983d16b5\" (UID: \"793c1b16-a031-4e64-8874-03cf983d16b5\") "
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.379723 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-utilities" (OuterVolumeSpecName: "utilities") pod "793c1b16-a031-4e64-8874-03cf983d16b5" (UID: "793c1b16-a031-4e64-8874-03cf983d16b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.390783 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/793c1b16-a031-4e64-8874-03cf983d16b5-kube-api-access-cdxk6" (OuterVolumeSpecName: "kube-api-access-cdxk6") pod "793c1b16-a031-4e64-8874-03cf983d16b5" (UID: "793c1b16-a031-4e64-8874-03cf983d16b5"). InnerVolumeSpecName "kube-api-access-cdxk6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.428308 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "793c1b16-a031-4e64-8874-03cf983d16b5" (UID: "793c1b16-a031-4e64-8874-03cf983d16b5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.480003 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdxk6\" (UniqueName: \"kubernetes.io/projected/793c1b16-a031-4e64-8874-03cf983d16b5-kube-api-access-cdxk6\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.480030 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.480039 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/793c1b16-a031-4e64-8874-03cf983d16b5-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.931464 4816 generic.go:334] "Generic (PLEG): container finished" podID="793c1b16-a031-4e64-8874-03cf983d16b5" containerID="847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47" exitCode=0
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.931504 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsvtk"
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.931523 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsvtk" event={"ID":"793c1b16-a031-4e64-8874-03cf983d16b5","Type":"ContainerDied","Data":"847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47"}
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.931582 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsvtk" event={"ID":"793c1b16-a031-4e64-8874-03cf983d16b5","Type":"ContainerDied","Data":"977ff384ed4d5e33c4bd8ec00a100343ef500de755ac38a42bc43833b08f95c2"}
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.931615 4816 scope.go:117] "RemoveContainer" containerID="847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47"
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.953892 4816 scope.go:117] "RemoveContainer" containerID="c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458"
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.962311 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nsvtk"]
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.966178 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nsvtk"]
Feb 16 13:07:14 crc kubenswrapper[4816]: I0216 13:07:14.979496 4816 scope.go:117] "RemoveContainer" containerID="de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.002690 4816 scope.go:117] "RemoveContainer" containerID="847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47"
Feb 16 13:07:15 crc kubenswrapper[4816]: E0216 13:07:15.003150 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47\": container with ID starting with 847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47 not found: ID does not exist" containerID="847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.003186 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47"} err="failed to get container status \"847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47\": rpc error: code = NotFound desc = could not find container \"847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47\": container with ID starting with 847c4c4772844e3c8fe3ce3749729afea63059e68c0fbc2bc26f422c404deb47 not found: ID does not exist"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.003209 4816 scope.go:117] "RemoveContainer" containerID="c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458"
Feb 16 13:07:15 crc kubenswrapper[4816]: E0216 13:07:15.003574 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458\": container with ID starting with c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458 not found: ID does not exist" containerID="c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.003596 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458"} err="failed to get container status \"c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458\": rpc error: code = NotFound desc = could not find container \"c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458\": container with ID starting with c9bacc9720ba7fd987f373692de403dce432541be5bce9c0ffee4cc9d8278458 not found: ID does not exist"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.003612 4816 scope.go:117] "RemoveContainer" containerID="de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756"
Feb 16 13:07:15 crc kubenswrapper[4816]: E0216 13:07:15.004126 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756\": container with ID starting with de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756 not found: ID does not exist" containerID="de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.004151 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756"} err="failed to get container status \"de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756\": rpc error: code = NotFound desc = could not find container \"de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756\": container with ID starting with de5fe9a94b55adcc6381c110fbb88700f1818dcf90a2fcf6c9b9d0342d74b756 not found: ID does not exist"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.405678 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" path="/var/lib/kubelet/pods/793c1b16-a031-4e64-8874-03cf983d16b5/volumes"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.812627 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wjhmn"
Feb 16 13:07:15 crc kubenswrapper[4816]: I0216 13:07:15.856131 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wjhmn"
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.403889 4816 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.404520 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="registry-server" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.404539 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="registry-server" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.404552 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="extract-content" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.404562 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="extract-content" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.404616 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="extract-utilities" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.404628 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="extract-utilities" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.404806 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="793c1b16-a031-4e64-8874-03cf983d16b5" containerName="registry-server" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.405246 4816 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.405455 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.405726 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517" gracePeriod=15 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.405802 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18" gracePeriod=15 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.405875 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda" gracePeriod=15 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.405935 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef" gracePeriod=15 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.405945 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357" gracePeriod=15 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.406570 4816 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.406799 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.406818 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.406830 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.406840 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.406854 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.406864 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.406877 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 16 13:07:18 
crc kubenswrapper[4816]: I0216 13:07:18.406887 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.406901 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.406911 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.406925 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.406934 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.406949 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.406959 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407094 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407111 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407125 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407139 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407153 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407167 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407183 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.407324 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.407336 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.470113 4816 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.244:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.531894 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.532069 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.532211 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.532295 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.532380 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.532466 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.532552 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.532805 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.634728 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.634786 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.634814 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.634840 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.634866 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.634924 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.634983 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635221 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635312 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635403 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635463 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635494 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635634 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635761 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635786 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.635806 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.771766 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:18 crc kubenswrapper[4816]: W0216 13:07:18.794234 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-ed0a61a9ce87d9868521c61645f15bc6d1626573fc83b7c3ae8535b69837108e WatchSource:0}: Error finding container ed0a61a9ce87d9868521c61645f15bc6d1626573fc83b7c3ae8535b69837108e: Status 404 returned error can't find the container with id ed0a61a9ce87d9868521c61645f15bc6d1626573fc83b7c3ae8535b69837108e Feb 16 13:07:18 crc kubenswrapper[4816]: E0216 13:07:18.802716 4816 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.244:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1894bbfaae2e0bb2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-16 13:07:18.801918898 +0000 UTC m=+238.128632636,LastTimestamp:2026-02-16 13:07:18.801918898 +0000 UTC m=+238.128632636,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.956332 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.957939 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.960725 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef" exitCode=0 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.960772 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18" exitCode=0 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.960788 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda" exitCode=0 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.960807 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357" exitCode=2 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.960915 4816 scope.go:117] "RemoveContainer" containerID="b738a705809b147e3b062231182018e4c151209e8bec8e8b4346b1cd03c15b13" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 
13:07:18.963133 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"ed0a61a9ce87d9868521c61645f15bc6d1626573fc83b7c3ae8535b69837108e"} Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.965194 4816 generic.go:334] "Generic (PLEG): container finished" podID="ccee36b0-4da9-48a3-af2d-063ac451f44d" containerID="57d8ddba44f1f7f562be1e8e79a175c846abf27ae5a900f67b0605746b3b0665" exitCode=0 Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.965234 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ccee36b0-4da9-48a3-af2d-063ac451f44d","Type":"ContainerDied","Data":"57d8ddba44f1f7f562be1e8e79a175c846abf27ae5a900f67b0605746b3b0665"} Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.966199 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:18 crc kubenswrapper[4816]: I0216 13:07:18.966618 4816 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:19 crc kubenswrapper[4816]: I0216 13:07:19.972921 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 16 13:07:19 crc kubenswrapper[4816]: I0216 13:07:19.975096 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855"} Feb 16 13:07:19 crc kubenswrapper[4816]: I0216 13:07:19.975682 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:19 crc kubenswrapper[4816]: E0216 13:07:19.975702 4816 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.244:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.305324 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.306184 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.458418 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ccee36b0-4da9-48a3-af2d-063ac451f44d-kube-api-access\") pod \"ccee36b0-4da9-48a3-af2d-063ac451f44d\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.458557 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-kubelet-dir\") pod \"ccee36b0-4da9-48a3-af2d-063ac451f44d\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.458649 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-var-lock\") pod \"ccee36b0-4da9-48a3-af2d-063ac451f44d\" (UID: \"ccee36b0-4da9-48a3-af2d-063ac451f44d\") " Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.458793 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ccee36b0-4da9-48a3-af2d-063ac451f44d" (UID: "ccee36b0-4da9-48a3-af2d-063ac451f44d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.459065 4816 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.459133 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-var-lock" (OuterVolumeSpecName: "var-lock") pod "ccee36b0-4da9-48a3-af2d-063ac451f44d" (UID: "ccee36b0-4da9-48a3-af2d-063ac451f44d"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.475064 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccee36b0-4da9-48a3-af2d-063ac451f44d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ccee36b0-4da9-48a3-af2d-063ac451f44d" (UID: "ccee36b0-4da9-48a3-af2d-063ac451f44d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.561913 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ccee36b0-4da9-48a3-af2d-063ac451f44d-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.561967 4816 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ccee36b0-4da9-48a3-af2d-063ac451f44d-var-lock\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.783152 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.783941 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.784516 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.785152 4816 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:20 crc kubenswrapper[4816]: E0216 13:07:20.881757 4816 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.244:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1894bbfaae2e0bb2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-16 13:07:18.801918898 +0000 UTC m=+238.128632636,LastTimestamp:2026-02-16 13:07:18.801918898 +0000 UTC m=+238.128632636,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.966502 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.966726 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.966800 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.967099 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.967109 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.967157 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.986358 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.987607 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517" exitCode=0 Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.987757 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.987774 4816 scope.go:117] "RemoveContainer" containerID="b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.991299 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.991325 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ccee36b0-4da9-48a3-af2d-063ac451f44d","Type":"ContainerDied","Data":"8f99f4e5a17d7e8b89fae638218bd0f10d3203b8c6968977467d7ecfa10ff45d"} Feb 16 13:07:20 crc kubenswrapper[4816]: I0216 13:07:20.991381 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f99f4e5a17d7e8b89fae638218bd0f10d3203b8c6968977467d7ecfa10ff45d" Feb 16 13:07:20 crc kubenswrapper[4816]: E0216 13:07:20.992341 4816 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.244:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.015896 4816 scope.go:117] "RemoveContainer" containerID="83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.016236 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.016395 4816 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.021330 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.022000 4816 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.036381 4816 scope.go:117] "RemoveContainer" containerID="a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.064062 4816 scope.go:117] "RemoveContainer" containerID="7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.068720 4816 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.068766 4816 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:21 crc 
kubenswrapper[4816]: I0216 13:07:21.068785 4816 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.082558 4816 scope.go:117] "RemoveContainer" containerID="013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.107704 4816 scope.go:117] "RemoveContainer" containerID="5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.131587 4816 scope.go:117] "RemoveContainer" containerID="b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.132765 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\": container with ID starting with b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef not found: ID does not exist" containerID="b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.132886 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef"} err="failed to get container status \"b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\": rpc error: code = NotFound desc = could not find container \"b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef\": container with ID starting with b30e6a68fb084f9c8e1052624b4663cf446a3ffce6bbca5558cb7f13353abfef not found: ID does not exist" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.132984 4816 scope.go:117] "RemoveContainer" containerID="83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.133761 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\": container with ID starting with 83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18 not found: ID does not exist" containerID="83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.133804 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18"} err="failed to get container status \"83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\": rpc error: code = NotFound desc = could not find container \"83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18\": container with ID starting with 83ab8f13baabb41bb9d7898b01d3d6a26217ecc9b84e6bb81c86a4218eb18d18 not found: ID does not exist" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.133836 4816 scope.go:117] "RemoveContainer" containerID="a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.134194 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\": container with ID starting with 
a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda not found: ID does not exist" containerID="a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.134260 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda"} err="failed to get container status \"a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\": rpc error: code = NotFound desc = could not find container \"a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda\": container with ID starting with a5eab5f5d945e81212dee47231727e6455f24827493514a250abaa9a8d6e4dda not found: ID does not exist" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.134303 4816 scope.go:117] "RemoveContainer" containerID="7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.135085 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\": container with ID starting with 7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357 not found: ID does not exist" containerID="7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.135114 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357"} err="failed to get container status \"7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\": rpc error: code = NotFound desc = could not find container \"7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357\": container with ID starting with 7b281356bcfd0750b70fd4f4f53492f66be1a06eea802d453086cbcecb15c357 not found: ID does not exist" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.135131 4816 scope.go:117] "RemoveContainer" containerID="013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.135582 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\": container with ID starting with 013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517 not found: ID does not exist" containerID="013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.135613 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517"} err="failed to get container status \"013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\": rpc error: code = NotFound desc = could not find container \"013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517\": container with ID starting with 013d4c93c7316d8ad39d9f182063059d76e62fa11fbfd249851684557a1f6517 not found: ID does not exist" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.135691 4816 scope.go:117] "RemoveContainer" containerID="5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.136090 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\": container with ID starting with 5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1 not found: ID does not exist" containerID="5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.136131 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1"} err="failed to get container status \"5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\": rpc error: code = NotFound desc = could not find container \"5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1\": container with ID starting with 5099d1458ece30188a163e4583da6d58f1a71c2746460a9239862cf317362ab1 not found: ID does not exist" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.318042 4816 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.318592 4816 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.319149 4816 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.319634 4816 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.320138 4816 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.320202 4816 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.320707 4816 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" interval="200ms" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.407698 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.408202 4816 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:21 crc kubenswrapper[4816]: I0216 13:07:21.408926 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.521897 4816 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" interval="400ms" Feb 16 13:07:21 crc kubenswrapper[4816]: E0216 13:07:21.922534 4816 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" interval="800ms" Feb 16 13:07:22 crc kubenswrapper[4816]: E0216 13:07:22.724519 4816 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" interval="1.6s" Feb 16 13:07:24 crc kubenswrapper[4816]: E0216 13:07:24.325562 4816 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" interval="3.2s" Feb 16 13:07:27 crc kubenswrapper[4816]: E0216 13:07:27.526827 4816 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.244:6443: connect: connection refused" interval="6.4s" Feb 16 13:07:30 crc kubenswrapper[4816]: E0216 13:07:30.882938 4816 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.244:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1894bbfaae2e0bb2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-16 13:07:18.801918898 +0000 UTC m=+238.128632636,LastTimestamp:2026-02-16 13:07:18.801918898 +0000 UTC m=+238.128632636,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 16 13:07:31 crc kubenswrapper[4816]: I0216 13:07:31.047626 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 16 13:07:31 crc 
kubenswrapper[4816]: I0216 13:07:31.047717 4816 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda" exitCode=1 Feb 16 13:07:31 crc kubenswrapper[4816]: I0216 13:07:31.047751 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda"} Feb 16 13:07:31 crc kubenswrapper[4816]: I0216 13:07:31.048321 4816 scope.go:117] "RemoveContainer" containerID="81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda" Feb 16 13:07:31 crc kubenswrapper[4816]: I0216 13:07:31.048618 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:31 crc kubenswrapper[4816]: I0216 13:07:31.049244 4816 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:31 crc kubenswrapper[4816]: I0216 13:07:31.400713 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:31 crc kubenswrapper[4816]: I0216 13:07:31.401537 4816 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.055739 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.056906 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"35c876e81102a64d6df4a78388bfe8884c281ff9d03f79d1b777cf35771469bc"} Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.058228 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.059136 4816 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.398285 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.399850 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.400439 4816 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.414973 4816 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.415011 4816 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:32 crc kubenswrapper[4816]: E0216 13:07:32.415439 4816 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:32 crc kubenswrapper[4816]: I0216 13:07:32.415794 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:33 crc kubenswrapper[4816]: I0216 13:07:33.065129 4816 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="bd5755bb84763783147106502d846d4b4b9a39f10a9d118bf56bbd91756cffa3" exitCode=0 Feb 16 13:07:33 crc kubenswrapper[4816]: I0216 13:07:33.065184 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"bd5755bb84763783147106502d846d4b4b9a39f10a9d118bf56bbd91756cffa3"} Feb 16 13:07:33 crc kubenswrapper[4816]: I0216 13:07:33.065217 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4f460d063bc32440db7c22b26e1241f350d6a6cd67cf1bd1b927309623923175"} Feb 16 13:07:33 crc kubenswrapper[4816]: I0216 13:07:33.065540 4816 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:33 crc kubenswrapper[4816]: I0216 13:07:33.065558 4816 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:33 crc kubenswrapper[4816]: I0216 13:07:33.066191 4816 status_manager.go:851] "Failed to get status for pod" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:33 crc kubenswrapper[4816]: E0216 13:07:33.066853 4816 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:33 crc kubenswrapper[4816]: I0216 13:07:33.066874 4816 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.244:6443: connect: connection refused" Feb 16 13:07:34 crc kubenswrapper[4816]: I0216 13:07:34.079576 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"03b95f7ef82f883c082d9e8a2ee1a75206e177ed4de0ab42a4fdaa01aba309f1"} Feb 16 13:07:34 crc kubenswrapper[4816]: I0216 13:07:34.079960 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bb70dbee2c390b2159be9ac8fabf96813b8e71517f8841c9b722a3eb60368137"} Feb 16 13:07:34 crc kubenswrapper[4816]: I0216 13:07:34.079972 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"35e65cbfc2704f9f984b28c2eddad834ac3703e523db9e2c1a540c3d244a3c02"} Feb 16 13:07:34 crc kubenswrapper[4816]: I0216 13:07:34.079981 4816 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9acf41acaaca86ce020c021cb8c11c304c61bde057565ab971376ada08298f48"} Feb 16 13:07:34 crc kubenswrapper[4816]: I0216 13:07:34.536048 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:07:34 crc kubenswrapper[4816]: I0216 13:07:34.536354 4816 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Feb 16 13:07:34 crc kubenswrapper[4816]: I0216 13:07:34.536389 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Feb 16 13:07:35 crc kubenswrapper[4816]: I0216 13:07:35.093611 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fbb5ea7b3f46b7e5e052067226a3bbbe740e1e518dc33b5c81ac1706eb07e672"} Feb 16 13:07:35 crc kubenswrapper[4816]: I0216 13:07:35.093916 4816 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:35 crc kubenswrapper[4816]: I0216 13:07:35.093931 4816 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:35 crc kubenswrapper[4816]: I0216 13:07:35.094150 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:35 crc kubenswrapper[4816]: I0216 13:07:35.467545 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:07:37 crc kubenswrapper[4816]: I0216 13:07:37.416867 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:37 crc kubenswrapper[4816]: I0216 13:07:37.416941 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:37 crc kubenswrapper[4816]: I0216 13:07:37.427982 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:40 crc kubenswrapper[4816]: I0216 13:07:40.103611 4816 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:41 crc kubenswrapper[4816]: I0216 13:07:41.122480 4816 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:41 crc kubenswrapper[4816]: I0216 13:07:41.122514 4816 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:41 crc kubenswrapper[4816]: I0216 13:07:41.127355 4816 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:07:41 crc kubenswrapper[4816]: I0216 13:07:41.407681 4816 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c665c479-cbde-42fd-805d-afb3a423f6ba" Feb 16 13:07:42 crc kubenswrapper[4816]: I0216 13:07:42.126602 4816 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:42 crc kubenswrapper[4816]: I0216 13:07:42.126629 4816 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="84695303-3a5e-4393-9721-39e156759f00" Feb 16 13:07:42 crc kubenswrapper[4816]: I0216 13:07:42.130802 4816 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c665c479-cbde-42fd-805d-afb3a423f6ba" Feb 16 13:07:44 crc kubenswrapper[4816]: I0216 13:07:44.536333 4816 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Feb 16 13:07:44 crc kubenswrapper[4816]: I0216 13:07:44.538247 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Feb 16 13:07:49 crc kubenswrapper[4816]: I0216 13:07:49.111718 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 16 13:07:49 crc kubenswrapper[4816]: I0216 13:07:49.872164 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 16 13:07:49 crc kubenswrapper[4816]: I0216 13:07:49.908341 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 16 13:07:49 crc kubenswrapper[4816]: I0216 13:07:49.925169 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.188068 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.218704 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.426094 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.543177 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.614127 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 16 13:07:50 
crc kubenswrapper[4816]: I0216 13:07:50.636160 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.663774 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.692477 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 16 13:07:50 crc kubenswrapper[4816]: I0216 13:07:50.852991 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 16 13:07:51 crc kubenswrapper[4816]: I0216 13:07:51.400808 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 16 13:07:51 crc kubenswrapper[4816]: I0216 13:07:51.531896 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 16 13:07:51 crc kubenswrapper[4816]: I0216 13:07:51.720378 4816 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 16 13:07:51 crc kubenswrapper[4816]: I0216 13:07:51.836758 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 16 13:07:51 crc kubenswrapper[4816]: I0216 13:07:51.937532 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.057154 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.197202 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.219265 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.230465 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.311718 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.356123 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.514915 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.629686 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.715093 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.756809 4816 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.798274 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.932647 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 16 13:07:52 crc kubenswrapper[4816]: I0216 13:07:52.974036 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.051848 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.151754 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.212929 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.223387 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.338546 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.399030 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.439256 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.443842 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.444522 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.527303 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.648082 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.801482 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.822868 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.881913 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.889262 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.889730 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 16 13:07:53 crc 
kubenswrapper[4816]: I0216 13:07:53.900686 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.946199 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.952388 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 16 13:07:53 crc kubenswrapper[4816]: I0216 13:07:53.981394 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.082470 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.144246 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.195521 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.198561 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.202569 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.204018 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.236430 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.280180 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.386532 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.414889 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.492768 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.536322 4816 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.536386 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get 
\"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.536453 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.537258 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"35c876e81102a64d6df4a78388bfe8884c281ff9d03f79d1b777cf35771469bc"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.537410 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://35c876e81102a64d6df4a78388bfe8884c281ff9d03f79d1b777cf35771469bc" gracePeriod=30 Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.545708 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.606165 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.767685 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.863589 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.902697 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.953936 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.956553 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 16 13:07:54 crc kubenswrapper[4816]: I0216 13:07:54.997487 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.056239 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.134338 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.195089 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.226296 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.313129 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" 
Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.326112 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.352972 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.444553 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.468015 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.534932 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.566510 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.629706 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.680490 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.687324 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 16 13:07:55 crc kubenswrapper[4816]: I0216 13:07:55.769041 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.031877 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.041141 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.064047 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.152507 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.209898 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.272704 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.281897 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.291483 4816 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.346303 4816 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.503288 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.554269 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.667519 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.687991 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.716607 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.745262 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.779690 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.817061 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.847471 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.895465 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.895643 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.963735 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 16 13:07:56 crc kubenswrapper[4816]: I0216 13:07:56.998437 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.046313 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.198853 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.231178 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.256153 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.393241 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.436271 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 
16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.493526 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.694069 4816 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.723852 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.738012 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.786033 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.828044 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.829140 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.892716 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.897536 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.912882 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 16 13:07:57 crc kubenswrapper[4816]: I0216 13:07:57.997688 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.054203 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.141901 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.165277 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.208335 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.216864 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.254805 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.266224 4816 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.275966 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 16 13:07:58 crc kubenswrapper[4816]: 
I0216 13:07:58.464184 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.509635 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.511616 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.552032 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.572161 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.584314 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.603364 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.631003 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.656081 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.723776 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.725229 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.733828 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.736818 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.752594 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.767375 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 16 13:07:58 crc kubenswrapper[4816]: I0216 13:07:58.987779 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.049320 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.076225 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.087250 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 16 13:07:59 crc 
kubenswrapper[4816]: I0216 13:07:59.117623 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.161806 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.177504 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.203592 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.313527 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.379540 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.414997 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.539056 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.553884 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.652970 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.672785 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.677934 4816 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.685156 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.857175 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.910486 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.920157 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 16 13:07:59 crc kubenswrapper[4816]: I0216 13:07:59.933010 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.001566 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.022809 4816 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.034005 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.061820 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.397888 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.415735 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.497534 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.506527 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.562910 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.692324 4816 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.698948 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.701911 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.705098 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.705157 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.711574 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.728324 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.729449 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=20.729430377 podStartE2EDuration="20.729430377s" podCreationTimestamp="2026-02-16 13:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:08:00.723599769 +0000 UTC m=+280.050313497" watchObservedRunningTime="2026-02-16 13:08:00.729430377 +0000 UTC m=+280.056144105" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.734356 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.847294 4816 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.905400 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.921929 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 16 13:08:00 crc kubenswrapper[4816]: I0216 13:08:00.982180 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.069467 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.097691 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.116543 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.151463 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.227232 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.235104 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.259292 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.271136 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.284217 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.301738 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.301796 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.386551 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.533079 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.568231 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.586276 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.586605 4816 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.604345 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.798747 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.939440 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.987454 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 16 13:08:01 crc kubenswrapper[4816]: I0216 13:08:01.998329 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.005296 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.042747 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.093309 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.093811 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.120432 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.137098 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.207098 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.244032 4816 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.244260 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855" gracePeriod=5 Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.280558 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.297610 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.308243 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 
Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.367322 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.477825 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.538500 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.680694 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.763900 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Feb 16 13:08:02 crc kubenswrapper[4816]: I0216 13:08:02.971210 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.033850 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.044665 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.085458 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.335298 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.435363 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.536342 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.678395 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.699946 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Feb 16 13:08:03 crc kubenswrapper[4816]: I0216 13:08:03.890024 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.081317 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.291559 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.346231 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.367806 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.438912 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.554880 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.629569 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.665325 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.717361 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Feb 16 13:08:04 crc kubenswrapper[4816]: I0216 13:08:04.904089 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.111958 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.213649 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.232122 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.256905 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.312997 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.418385 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.431912 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Feb 16 13:08:05 crc kubenswrapper[4816]: I0216 13:08:05.685771 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Feb 16 13:08:06 crc kubenswrapper[4816]: I0216 13:08:06.238458 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Feb 16 13:08:06 crc kubenswrapper[4816]: I0216 13:08:06.942382 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.842467 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.956885 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.956996 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.956994 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.957076 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.957143 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.957172 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.957248 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.957388 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.957792 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.958030 4816 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.958052 4816 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.958064 4816 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.958074 4816 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:07 crc kubenswrapper[4816]: I0216 13:08:07.966683 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:08:08 crc kubenswrapper[4816]: I0216 13:08:08.059121 4816 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:08 crc kubenswrapper[4816]: I0216 13:08:08.262043 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 16 13:08:08 crc kubenswrapper[4816]: I0216 13:08:08.262087 4816 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855" exitCode=137 Feb 16 13:08:08 crc kubenswrapper[4816]: I0216 13:08:08.262129 4816 scope.go:117] "RemoveContainer" containerID="bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855" Feb 16 13:08:08 crc kubenswrapper[4816]: I0216 13:08:08.262241 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 16 13:08:08 crc kubenswrapper[4816]: I0216 13:08:08.283888 4816 scope.go:117] "RemoveContainer" containerID="bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855" Feb 16 13:08:08 crc kubenswrapper[4816]: E0216 13:08:08.284347 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855\": container with ID starting with bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855 not found: ID does not exist" containerID="bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855" Feb 16 13:08:08 crc kubenswrapper[4816]: I0216 13:08:08.284412 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855"} err="failed to get container status \"bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855\": rpc error: code = NotFound desc = could not find container \"bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855\": container with ID starting with bcc715ce7af276867bf4c87d1c93d9a71ac4654047d50b1d9ea8d924219c4855 not found: ID does not exist" Feb 16 13:08:09 crc kubenswrapper[4816]: I0216 13:08:09.406048 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.931676 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6f4r6"] Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.932448 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6f4r6" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="registry-server" containerID="cri-o://c0db03af5c6961bfa74dc979dce937737791d6d77d1c1ae1adf382048f1cda38" gracePeriod=30 Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.939480 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-89gd5"] Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.939866 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-89gd5" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="registry-server" containerID="cri-o://cf7e4d12ed49ab100fdc7da5e26468b3c5f25162474ea59585a0c448ccfd7807" gracePeriod=30 Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.955154 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-966kl"] Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.955413 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" podUID="9f11459f-3567-4ed0-8bf3-d55ca3507378" containerName="marketplace-operator" containerID="cri-o://5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a" gracePeriod=30 Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.965817 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbjwq"] Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.966095 4816 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-marketplace-lbjwq" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="registry-server" containerID="cri-o://987d4079172833ec6055546a0286b29664d780e99079d9f067ead9eb90494486" gracePeriod=30 Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.971877 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wjhmn"] Feb 16 13:08:19 crc kubenswrapper[4816]: I0216 13:08:19.972158 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wjhmn" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="registry-server" containerID="cri-o://c4380e54e80a746059752fb8a240adda02c69160cdcd45d052efb9e3ec72a6d7" gracePeriod=30 Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.321038 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.334552 4816 generic.go:334] "Generic (PLEG): container finished" podID="7a4d9957-2535-428e-8a45-b092fa854f73" containerID="987d4079172833ec6055546a0286b29664d780e99079d9f067ead9eb90494486" exitCode=0 Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.334743 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbjwq" event={"ID":"7a4d9957-2535-428e-8a45-b092fa854f73","Type":"ContainerDied","Data":"987d4079172833ec6055546a0286b29664d780e99079d9f067ead9eb90494486"} Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.356781 4816 generic.go:334] "Generic (PLEG): container finished" podID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerID="c0db03af5c6961bfa74dc979dce937737791d6d77d1c1ae1adf382048f1cda38" exitCode=0 Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.356941 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f4r6" event={"ID":"7550d81a-724b-4009-80d0-fa650ea35fa8","Type":"ContainerDied","Data":"c0db03af5c6961bfa74dc979dce937737791d6d77d1c1ae1adf382048f1cda38"} Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.360168 4816 generic.go:334] "Generic (PLEG): container finished" podID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerID="cf7e4d12ed49ab100fdc7da5e26468b3c5f25162474ea59585a0c448ccfd7807" exitCode=0 Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.360346 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89gd5" event={"ID":"19812a43-b6b0-45dc-9c35-b3c787fd4ff1","Type":"ContainerDied","Data":"cf7e4d12ed49ab100fdc7da5e26468b3c5f25162474ea59585a0c448ccfd7807"} Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.361685 4816 generic.go:334] "Generic (PLEG): container finished" podID="9f11459f-3567-4ed0-8bf3-d55ca3507378" containerID="5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a" exitCode=0 Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.361829 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" event={"ID":"9f11459f-3567-4ed0-8bf3-d55ca3507378","Type":"ContainerDied","Data":"5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a"} Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.361919 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" 
event={"ID":"9f11459f-3567-4ed0-8bf3-d55ca3507378","Type":"ContainerDied","Data":"3d5e424ba7a68d97a32fb49c7f526601376795f9f808f6e0ead729b996475a89"} Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.361929 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-966kl" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.361957 4816 scope.go:117] "RemoveContainer" containerID="5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.368563 4816 generic.go:334] "Generic (PLEG): container finished" podID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerID="c4380e54e80a746059752fb8a240adda02c69160cdcd45d052efb9e3ec72a6d7" exitCode=0 Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.368609 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjhmn" event={"ID":"e359a80a-04b9-4544-85b7-b51c74bbef61","Type":"ContainerDied","Data":"c4380e54e80a746059752fb8a240adda02c69160cdcd45d052efb9e3ec72a6d7"} Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.381516 4816 scope.go:117] "RemoveContainer" containerID="5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a" Feb 16 13:08:20 crc kubenswrapper[4816]: E0216 13:08:20.382101 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a\": container with ID starting with 5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a not found: ID does not exist" containerID="5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.382145 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a"} err="failed to get container status \"5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a\": rpc error: code = NotFound desc = could not find container \"5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a\": container with ID starting with 5ba8e12cb80bf726c4bd106c765df3e5ed8da5fc97fb89915c8087378362470a not found: ID does not exist" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.437518 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.440668 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.491563 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.497347 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.510034 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-operator-metrics\") pod \"9f11459f-3567-4ed0-8bf3-d55ca3507378\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.510097 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lrtd\" (UniqueName: \"kubernetes.io/projected/9f11459f-3567-4ed0-8bf3-d55ca3507378-kube-api-access-8lrtd\") pod \"9f11459f-3567-4ed0-8bf3-d55ca3507378\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.510178 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-trusted-ca\") pod \"9f11459f-3567-4ed0-8bf3-d55ca3507378\" (UID: \"9f11459f-3567-4ed0-8bf3-d55ca3507378\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.511096 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9f11459f-3567-4ed0-8bf3-d55ca3507378" (UID: "9f11459f-3567-4ed0-8bf3-d55ca3507378"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.516389 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9f11459f-3567-4ed0-8bf3-d55ca3507378" (UID: "9f11459f-3567-4ed0-8bf3-d55ca3507378"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.516465 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f11459f-3567-4ed0-8bf3-d55ca3507378-kube-api-access-8lrtd" (OuterVolumeSpecName: "kube-api-access-8lrtd") pod "9f11459f-3567-4ed0-8bf3-d55ca3507378" (UID: "9f11459f-3567-4ed0-8bf3-d55ca3507378"). InnerVolumeSpecName "kube-api-access-8lrtd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611212 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htr6p\" (UniqueName: \"kubernetes.io/projected/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-kube-api-access-htr6p\") pod \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611272 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-catalog-content\") pod \"e359a80a-04b9-4544-85b7-b51c74bbef61\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611313 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-utilities\") pod \"7a4d9957-2535-428e-8a45-b092fa854f73\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611335 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-utilities\") pod \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\" (UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611361 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-catalog-content\") pod \"7550d81a-724b-4009-80d0-fa650ea35fa8\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611394 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-utilities\") pod \"7550d81a-724b-4009-80d0-fa650ea35fa8\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611426 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xrvr\" (UniqueName: \"kubernetes.io/projected/7550d81a-724b-4009-80d0-fa650ea35fa8-kube-api-access-6xrvr\") pod \"7550d81a-724b-4009-80d0-fa650ea35fa8\" (UID: \"7550d81a-724b-4009-80d0-fa650ea35fa8\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611450 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-utilities\") pod \"e359a80a-04b9-4544-85b7-b51c74bbef61\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611478 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxqn4\" (UniqueName: \"kubernetes.io/projected/e359a80a-04b9-4544-85b7-b51c74bbef61-kube-api-access-xxqn4\") pod \"e359a80a-04b9-4544-85b7-b51c74bbef61\" (UID: \"e359a80a-04b9-4544-85b7-b51c74bbef61\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611501 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-catalog-content\") pod \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\" 
(UID: \"19812a43-b6b0-45dc-9c35-b3c787fd4ff1\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611524 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh2gk\" (UniqueName: \"kubernetes.io/projected/7a4d9957-2535-428e-8a45-b092fa854f73-kube-api-access-bh2gk\") pod \"7a4d9957-2535-428e-8a45-b092fa854f73\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611541 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-catalog-content\") pod \"7a4d9957-2535-428e-8a45-b092fa854f73\" (UID: \"7a4d9957-2535-428e-8a45-b092fa854f73\") " Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611728 4816 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611741 4816 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f11459f-3567-4ed0-8bf3-d55ca3507378-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.611750 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lrtd\" (UniqueName: \"kubernetes.io/projected/9f11459f-3567-4ed0-8bf3-d55ca3507378-kube-api-access-8lrtd\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.612193 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-utilities" (OuterVolumeSpecName: "utilities") pod "19812a43-b6b0-45dc-9c35-b3c787fd4ff1" (UID: "19812a43-b6b0-45dc-9c35-b3c787fd4ff1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.612405 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-utilities" (OuterVolumeSpecName: "utilities") pod "7a4d9957-2535-428e-8a45-b092fa854f73" (UID: "7a4d9957-2535-428e-8a45-b092fa854f73"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.612614 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-utilities" (OuterVolumeSpecName: "utilities") pod "7550d81a-724b-4009-80d0-fa650ea35fa8" (UID: "7550d81a-724b-4009-80d0-fa650ea35fa8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.612683 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-utilities" (OuterVolumeSpecName: "utilities") pod "e359a80a-04b9-4544-85b7-b51c74bbef61" (UID: "e359a80a-04b9-4544-85b7-b51c74bbef61"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.613853 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7550d81a-724b-4009-80d0-fa650ea35fa8-kube-api-access-6xrvr" (OuterVolumeSpecName: "kube-api-access-6xrvr") pod "7550d81a-724b-4009-80d0-fa650ea35fa8" (UID: "7550d81a-724b-4009-80d0-fa650ea35fa8"). InnerVolumeSpecName "kube-api-access-6xrvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.614045 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e359a80a-04b9-4544-85b7-b51c74bbef61-kube-api-access-xxqn4" (OuterVolumeSpecName: "kube-api-access-xxqn4") pod "e359a80a-04b9-4544-85b7-b51c74bbef61" (UID: "e359a80a-04b9-4544-85b7-b51c74bbef61"). InnerVolumeSpecName "kube-api-access-xxqn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.614750 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-kube-api-access-htr6p" (OuterVolumeSpecName: "kube-api-access-htr6p") pod "19812a43-b6b0-45dc-9c35-b3c787fd4ff1" (UID: "19812a43-b6b0-45dc-9c35-b3c787fd4ff1"). InnerVolumeSpecName "kube-api-access-htr6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.614990 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a4d9957-2535-428e-8a45-b092fa854f73-kube-api-access-bh2gk" (OuterVolumeSpecName: "kube-api-access-bh2gk") pod "7a4d9957-2535-428e-8a45-b092fa854f73" (UID: "7a4d9957-2535-428e-8a45-b092fa854f73"). InnerVolumeSpecName "kube-api-access-bh2gk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.647443 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a4d9957-2535-428e-8a45-b092fa854f73" (UID: "7a4d9957-2535-428e-8a45-b092fa854f73"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.672598 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7550d81a-724b-4009-80d0-fa650ea35fa8" (UID: "7550d81a-724b-4009-80d0-fa650ea35fa8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.677166 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19812a43-b6b0-45dc-9c35-b3c787fd4ff1" (UID: "19812a43-b6b0-45dc-9c35-b3c787fd4ff1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.690713 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-966kl"] Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.696697 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-966kl"] Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713304 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh2gk\" (UniqueName: \"kubernetes.io/projected/7a4d9957-2535-428e-8a45-b092fa854f73-kube-api-access-bh2gk\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713341 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713353 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htr6p\" (UniqueName: \"kubernetes.io/projected/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-kube-api-access-htr6p\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713366 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a4d9957-2535-428e-8a45-b092fa854f73-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713379 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713390 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713403 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7550d81a-724b-4009-80d0-fa650ea35fa8-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713415 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xrvr\" (UniqueName: \"kubernetes.io/projected/7550d81a-724b-4009-80d0-fa650ea35fa8-kube-api-access-6xrvr\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713426 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713437 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxqn4\" (UniqueName: \"kubernetes.io/projected/e359a80a-04b9-4544-85b7-b51c74bbef61-kube-api-access-xxqn4\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.713449 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19812a43-b6b0-45dc-9c35-b3c787fd4ff1-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.741223 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e359a80a-04b9-4544-85b7-b51c74bbef61" (UID: "e359a80a-04b9-4544-85b7-b51c74bbef61"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:08:20 crc kubenswrapper[4816]: I0216 13:08:20.815364 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e359a80a-04b9-4544-85b7-b51c74bbef61-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.167759 4816 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.376710 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-89gd5" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.376699 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89gd5" event={"ID":"19812a43-b6b0-45dc-9c35-b3c787fd4ff1","Type":"ContainerDied","Data":"d41b6a4896160aae033f8fbd176c680759b56fce70e2b6bafd41628051ddbd9a"} Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.376886 4816 scope.go:117] "RemoveContainer" containerID="cf7e4d12ed49ab100fdc7da5e26468b3c5f25162474ea59585a0c448ccfd7807" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.381542 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wjhmn" event={"ID":"e359a80a-04b9-4544-85b7-b51c74bbef61","Type":"ContainerDied","Data":"63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635"} Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.381622 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wjhmn" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.385229 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbjwq" event={"ID":"7a4d9957-2535-428e-8a45-b092fa854f73","Type":"ContainerDied","Data":"b3929c7ef2dcfa351422cd51e6f7ffe323c26737444e8348fa27d90d8bd5319c"} Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.385371 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbjwq" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.395838 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f4r6" event={"ID":"7550d81a-724b-4009-80d0-fa650ea35fa8","Type":"ContainerDied","Data":"4a03afba67733c4bce78655e806378b333a5be62c9cc9829f73a81dd57c0caf2"} Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.396164 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6f4r6" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.403904 4816 scope.go:117] "RemoveContainer" containerID="d7763256c7a04cf1ddafe5af5d4738b13d7bfe4e84d456150136ac282ca20665" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.426678 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f11459f-3567-4ed0-8bf3-d55ca3507378" path="/var/lib/kubelet/pods/9f11459f-3567-4ed0-8bf3-d55ca3507378/volumes" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.427971 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-89gd5"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.428075 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-89gd5"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.428091 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wjhmn"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.428101 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wjhmn"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.433799 4816 scope.go:117] "RemoveContainer" containerID="717237571a7679e9e9a4ad4b433c7f01993d1a3c7983e750ac7e5bbe645e2af0" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.451809 4816 scope.go:117] "RemoveContainer" containerID="c4380e54e80a746059752fb8a240adda02c69160cdcd45d052efb9e3ec72a6d7" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.472781 4816 scope.go:117] "RemoveContainer" containerID="b547f6bec90a1d0f666814ce7d3324e9996d95ff0c42ae565634475efc87d784" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.477387 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6f4r6"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.484069 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6f4r6"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.488051 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbjwq"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.494422 4816 scope.go:117] "RemoveContainer" containerID="9dde6a7945500dd215a49ab801bc282fdebdb625627c4a5517e092a5e815c844" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.501704 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbjwq"] Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.518888 4816 scope.go:117] "RemoveContainer" containerID="987d4079172833ec6055546a0286b29664d780e99079d9f067ead9eb90494486" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.542163 4816 scope.go:117] "RemoveContainer" containerID="2f066f1b22502967ceb742e372de5938b6ebead430ed545c32fa195a450a35ef" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.554781 4816 scope.go:117] "RemoveContainer" containerID="412806ad72a9665d2ddf0f50e303a2d951058b01437017dbfb9f6e19814d7ca3" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.573842 4816 scope.go:117] "RemoveContainer" containerID="c0db03af5c6961bfa74dc979dce937737791d6d77d1c1ae1adf382048f1cda38" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.587532 4816 scope.go:117] "RemoveContainer" containerID="fb97fb1078f028784f6674ae914a089690f5ac6e42d695ef83aecb52fe6ce5b1" Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 
Feb 16 13:08:21 crc kubenswrapper[4816]: I0216 13:08:21.600310 4816 scope.go:117] "RemoveContainer" containerID="1e63127f510f65da1339df47f896c5bb63360a49aa9834f1f0dfb55196b16908"
Feb 16 13:08:23 crc kubenswrapper[4816]: I0216 13:08:23.406480 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" path="/var/lib/kubelet/pods/19812a43-b6b0-45dc-9c35-b3c787fd4ff1/volumes"
Feb 16 13:08:23 crc kubenswrapper[4816]: I0216 13:08:23.407479 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" path="/var/lib/kubelet/pods/7550d81a-724b-4009-80d0-fa650ea35fa8/volumes"
Feb 16 13:08:23 crc kubenswrapper[4816]: I0216 13:08:23.410711 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" path="/var/lib/kubelet/pods/7a4d9957-2535-428e-8a45-b092fa854f73/volumes"
Feb 16 13:08:23 crc kubenswrapper[4816]: I0216 13:08:23.412090 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" path="/var/lib/kubelet/pods/e359a80a-04b9-4544-85b7-b51c74bbef61/volumes"
Feb 16 13:08:24 crc kubenswrapper[4816]: E0216 13:08:24.668643 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice/crio-63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19812a43_b6b0_45dc_9c35_b3c787fd4ff1.slice\": RecentStats: unable to find data in memory cache]"
Feb 16 13:08:25 crc kubenswrapper[4816]: I0216 13:08:25.422407 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Feb 16 13:08:25 crc kubenswrapper[4816]: I0216 13:08:25.423983 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Feb 16 13:08:25 crc kubenswrapper[4816]: I0216 13:08:25.424016 4816 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="35c876e81102a64d6df4a78388bfe8884c281ff9d03f79d1b777cf35771469bc" exitCode=137
Feb 16 13:08:25 crc kubenswrapper[4816]: I0216 13:08:25.424041 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"35c876e81102a64d6df4a78388bfe8884c281ff9d03f79d1b777cf35771469bc"}
Feb 16 13:08:25 crc kubenswrapper[4816]: I0216 13:08:25.424061 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"26c8b2f5abb1f961b5e2e2f900370469d7dfba6d1e927a665ef833584e578c55"}
Feb 16 13:08:25 crc kubenswrapper[4816]: I0216 13:08:25.424076 4816 scope.go:117] "RemoveContainer" containerID="81120ea4bcac949478d3c6fa8e11a55393dda2d42d2f1b7c831d635394afabda"
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:08:26 crc kubenswrapper[4816]: I0216 13:08:26.430415 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Feb 16 13:08:34 crc kubenswrapper[4816]: I0216 13:08:34.536188 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:08:34 crc kubenswrapper[4816]: I0216 13:08:34.540201 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:08:34 crc kubenswrapper[4816]: E0216 13:08:34.785498 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19812a43_b6b0_45dc_9c35_b3c787fd4ff1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice/crio-63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:08:35 crc kubenswrapper[4816]: I0216 13:08:35.471956 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 16 13:08:44 crc kubenswrapper[4816]: E0216 13:08:44.898221 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice/crio-63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19812a43_b6b0_45dc_9c35_b3c787fd4ff1.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.972223 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"] Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.972763 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh" podUID="8f157dee-0cab-452b-8f32-1fd9e25ecd59" containerName="controller-manager" containerID="cri-o://c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40" gracePeriod=30 Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989020 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xx4rl"] Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989458 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989470 4816 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989479 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989485 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989504 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989511 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989523 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989529 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989546 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989554 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989561 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989569 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989582 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989588 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989600 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989606 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989615 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" containerName="installer" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989621 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" containerName="installer" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989636 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f11459f-3567-4ed0-8bf3-d55ca3507378" containerName="marketplace-operator" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989643 4816 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="9f11459f-3567-4ed0-8bf3-d55ca3507378" containerName="marketplace-operator" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989667 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989673 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989687 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989693 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="extract-utilities" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989705 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989711 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989719 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989725 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: E0216 13:08:48.989744 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989750 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="extract-content" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989947 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e359a80a-04b9-4544-85b7-b51c74bbef61" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989963 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="19812a43-b6b0-45dc-9c35-b3c787fd4ff1" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989977 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989985 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f11459f-3567-4ed0-8bf3-d55ca3507378" containerName="marketplace-operator" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.989999 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccee36b0-4da9-48a3-af2d-063ac451f44d" containerName="installer" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.990010 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a4d9957-2535-428e-8a45-b092fa854f73" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.990018 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7550d81a-724b-4009-80d0-fa650ea35fa8" containerName="registry-server" Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 
Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.990540 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.994970 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.995764 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.995907 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Feb 16 13:08:48 crc kubenswrapper[4816]: I0216 13:08:48.996306 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.001075 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.042710 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xx4rl"]
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.102379 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"]
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.102887 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb" podUID="78062d6d-e0f0-4659-add1-a4a4ea464c6f" containerName="route-controller-manager" containerID="cri-o://cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b" gracePeriod=30
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.153839 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edc67f7a-d508-4d46-b845-353aadc07314-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.153883 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc67f7a-d508-4d46-b845-353aadc07314-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.153900 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-678hb\" (UniqueName: \"kubernetes.io/projected/edc67f7a-d508-4d46-b845-353aadc07314-kube-api-access-678hb\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.255398 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edc67f7a-d508-4d46-b845-353aadc07314-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.255451 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc67f7a-d508-4d46-b845-353aadc07314-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.255475 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-678hb\" (UniqueName: \"kubernetes.io/projected/edc67f7a-d508-4d46-b845-353aadc07314-kube-api-access-678hb\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.256577 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc67f7a-d508-4d46-b845-353aadc07314-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.268121 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/edc67f7a-d508-4d46-b845-353aadc07314-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.294695 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-678hb\" (UniqueName: \"kubernetes.io/projected/edc67f7a-d508-4d46-b845-353aadc07314-kube-api-access-678hb\") pod \"marketplace-operator-79b997595-xx4rl\" (UID: \"edc67f7a-d508-4d46-b845-353aadc07314\") " pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.373726 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.381745 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.435407 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.551957 4816 generic.go:334] "Generic (PLEG): container finished" podID="8f157dee-0cab-452b-8f32-1fd9e25ecd59" containerID="c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40" exitCode=0
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.552114 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh" event={"ID":"8f157dee-0cab-452b-8f32-1fd9e25ecd59","Type":"ContainerDied","Data":"c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40"}
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.552194 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.552330 4816 scope.go:117] "RemoveContainer" containerID="c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.552295 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5dcd76744d-dv4nh" event={"ID":"8f157dee-0cab-452b-8f32-1fd9e25ecd59","Type":"ContainerDied","Data":"4469ffacf552d480d872a7ecd37953da133e2d507108ef85919b293b063d6c6c"}
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.554865 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.555238 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb" event={"ID":"78062d6d-e0f0-4659-add1-a4a4ea464c6f","Type":"ContainerDied","Data":"cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b"}
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.554590 4816 generic.go:334] "Generic (PLEG): container finished" podID="78062d6d-e0f0-4659-add1-a4a4ea464c6f" containerID="cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b" exitCode=0
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.555812 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb" event={"ID":"78062d6d-e0f0-4659-add1-a4a4ea464c6f","Type":"ContainerDied","Data":"c2423d13e8409294128dedd60a41eda69646b0b926998c6e60523b5c42f9d7c3"}
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.557925 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f157dee-0cab-452b-8f32-1fd9e25ecd59-serving-cert\") pod \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558511 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8nr9\" (UniqueName: \"kubernetes.io/projected/78062d6d-e0f0-4659-add1-a4a4ea464c6f-kube-api-access-j8nr9\") pod \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558537 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-client-ca\") pod \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558568 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dth2\" (UniqueName: \"kubernetes.io/projected/8f157dee-0cab-452b-8f32-1fd9e25ecd59-kube-api-access-6dth2\") pod \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558585 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-config\") pod \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558628 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-client-ca\") pod \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558702 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-config\") pod \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558747 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78062d6d-e0f0-4659-add1-a4a4ea464c6f-serving-cert\") pod \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\" (UID: \"78062d6d-e0f0-4659-add1-a4a4ea464c6f\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.558769 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-proxy-ca-bundles\") pod \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\" (UID: \"8f157dee-0cab-452b-8f32-1fd9e25ecd59\") "
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.559616 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8f157dee-0cab-452b-8f32-1fd9e25ecd59" (UID: "8f157dee-0cab-452b-8f32-1fd9e25ecd59"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.560118 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-client-ca" (OuterVolumeSpecName: "client-ca") pod "8f157dee-0cab-452b-8f32-1fd9e25ecd59" (UID: "8f157dee-0cab-452b-8f32-1fd9e25ecd59"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.561272 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-client-ca" (OuterVolumeSpecName: "client-ca") pod "78062d6d-e0f0-4659-add1-a4a4ea464c6f" (UID: "78062d6d-e0f0-4659-add1-a4a4ea464c6f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.561831 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-config" (OuterVolumeSpecName: "config") pod "78062d6d-e0f0-4659-add1-a4a4ea464c6f" (UID: "78062d6d-e0f0-4659-add1-a4a4ea464c6f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.561846 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-config" (OuterVolumeSpecName: "config") pod "8f157dee-0cab-452b-8f32-1fd9e25ecd59" (UID: "8f157dee-0cab-452b-8f32-1fd9e25ecd59"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.565645 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78062d6d-e0f0-4659-add1-a4a4ea464c6f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "78062d6d-e0f0-4659-add1-a4a4ea464c6f" (UID: "78062d6d-e0f0-4659-add1-a4a4ea464c6f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.565840 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f157dee-0cab-452b-8f32-1fd9e25ecd59-kube-api-access-6dth2" (OuterVolumeSpecName: "kube-api-access-6dth2") pod "8f157dee-0cab-452b-8f32-1fd9e25ecd59" (UID: "8f157dee-0cab-452b-8f32-1fd9e25ecd59"). InnerVolumeSpecName "kube-api-access-6dth2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.565915 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f157dee-0cab-452b-8f32-1fd9e25ecd59-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8f157dee-0cab-452b-8f32-1fd9e25ecd59" (UID: "8f157dee-0cab-452b-8f32-1fd9e25ecd59"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.566181 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78062d6d-e0f0-4659-add1-a4a4ea464c6f-kube-api-access-j8nr9" (OuterVolumeSpecName: "kube-api-access-j8nr9") pod "78062d6d-e0f0-4659-add1-a4a4ea464c6f" (UID: "78062d6d-e0f0-4659-add1-a4a4ea464c6f"). InnerVolumeSpecName "kube-api-access-j8nr9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.568429 4816 scope.go:117] "RemoveContainer" containerID="c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40"
Feb 16 13:08:49 crc kubenswrapper[4816]: E0216 13:08:49.570400 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40\": container with ID starting with c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40 not found: ID does not exist" containerID="c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.570449 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40"} err="failed to get container status \"c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40\": rpc error: code = NotFound desc = could not find container \"c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40\": container with ID starting with c6997369988068ef8730c09431bfdc45b4ee362fc996e45fc3ce5e5dd7c31d40 not found: ID does not exist"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.570569 4816 scope.go:117] "RemoveContainer" containerID="cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.594841 4816 scope.go:117] "RemoveContainer" containerID="cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b"
Feb 16 13:08:49 crc kubenswrapper[4816]: E0216 13:08:49.595689 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b\": container with ID starting with cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b not found: ID does not exist" containerID="cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.595743 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b"} err="failed to get container status \"cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b\": rpc error: code = NotFound desc = could not find container \"cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b\": container with ID starting with cedb334dbae079e223d067382d7ae3ef67cc092d787db8a4405777e5c485477b not found: ID does not exist"
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.659995 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f157dee-0cab-452b-8f32-1fd9e25ecd59-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660035 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8nr9\" (UniqueName: \"kubernetes.io/projected/78062d6d-e0f0-4659-add1-a4a4ea464c6f-kube-api-access-j8nr9\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660048 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-client-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660059 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dth2\" (UniqueName: \"kubernetes.io/projected/8f157dee-0cab-452b-8f32-1fd9e25ecd59-kube-api-access-6dth2\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660071 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78062d6d-e0f0-4659-add1-a4a4ea464c6f-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660083 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-client-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660094 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660113 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78062d6d-e0f0-4659-add1-a4a4ea464c6f-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.660123 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8f157dee-0cab-452b-8f32-1fd9e25ecd59-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.835760 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xx4rl"]
Feb 16 13:08:49 crc kubenswrapper[4816]: W0216 13:08:49.844146 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedc67f7a_d508_4d46_b845_353aadc07314.slice/crio-7be72a5ba1ab9edf17c51701d10a8297366926938a7672f1f9f156e12cbf3405 WatchSource:0}: Error finding container 7be72a5ba1ab9edf17c51701d10a8297366926938a7672f1f9f156e12cbf3405: Status 404 returned error can't find the container with id 7be72a5ba1ab9edf17c51701d10a8297366926938a7672f1f9f156e12cbf3405
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.881018 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"]
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.884152 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5dcd76744d-dv4nh"]
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.906464 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"]
Feb 16 13:08:49 crc kubenswrapper[4816]: I0216 13:08:49.910493 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78495b4c64-fftxb"]
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.328068 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-565f96c579-24629"]
Feb 16 13:08:50 crc kubenswrapper[4816]: E0216 13:08:50.328580 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78062d6d-e0f0-4659-add1-a4a4ea464c6f" containerName="route-controller-manager"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.328594 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78062d6d-e0f0-4659-add1-a4a4ea464c6f" containerName="route-controller-manager"
Feb 16 13:08:50 crc kubenswrapper[4816]: E0216 13:08:50.328604 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f157dee-0cab-452b-8f32-1fd9e25ecd59" containerName="controller-manager"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.328611 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f157dee-0cab-452b-8f32-1fd9e25ecd59" containerName="controller-manager"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.328761 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="78062d6d-e0f0-4659-add1-a4a4ea464c6f" containerName="route-controller-manager"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.328781 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f157dee-0cab-452b-8f32-1fd9e25ecd59" containerName="controller-manager"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.329192 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.330944 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.331051 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.331430 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.331456 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.331706 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.331899 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"]
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.331955 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.332553 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.334478 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.334693 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.335182 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.335360 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.335519 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.335634 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.340558 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.342184 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-565f96c579-24629"]
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.359904 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"]
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369027 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-client-ca\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369075 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-proxy-ca-bundles\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369101 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b06a7e4-cd64-44d0-9996-26abff878c64-serving-cert\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369130 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjkl7\" (UniqueName: \"kubernetes.io/projected/580eb17f-33b5-4834-bc26-815d5c54dbae-kube-api-access-pjkl7\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369159 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-config\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369209 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdvmg\" (UniqueName: \"kubernetes.io/projected/5b06a7e4-cd64-44d0-9996-26abff878c64-kube-api-access-cdvmg\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369259 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-config\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369281 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/580eb17f-33b5-4834-bc26-815d5c54dbae-serving-cert\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.369310 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-client-ca\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470491 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-config\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470540 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/580eb17f-33b5-4834-bc26-815d5c54dbae-serving-cert\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-client-ca\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470604 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-client-ca\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470620 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-proxy-ca-bundles\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470639 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b06a7e4-cd64-44d0-9996-26abff878c64-serving-cert\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470677 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjkl7\" (UniqueName: \"kubernetes.io/projected/580eb17f-33b5-4834-bc26-815d5c54dbae-kube-api-access-pjkl7\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470705 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-config\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.470725 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdvmg\" (UniqueName: \"kubernetes.io/projected/5b06a7e4-cd64-44d0-9996-26abff878c64-kube-api-access-cdvmg\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.472025 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-config\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.472150 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-client-ca\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.472260 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-config\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.472448 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-proxy-ca-bundles\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.472589 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-client-ca\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.481602 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b06a7e4-cd64-44d0-9996-26abff878c64-serving-cert\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.482145 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/580eb17f-33b5-4834-bc26-815d5c54dbae-serving-cert\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.491859 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdvmg\" (UniqueName: \"kubernetes.io/projected/5b06a7e4-cd64-44d0-9996-26abff878c64-kube-api-access-cdvmg\") pod \"controller-manager-565f96c579-24629\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.494968 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjkl7\" (UniqueName: \"kubernetes.io/projected/580eb17f-33b5-4834-bc26-815d5c54dbae-kube-api-access-pjkl7\") pod \"route-controller-manager-59988bbdc9-q5g59\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.568673 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl" event={"ID":"edc67f7a-d508-4d46-b845-353aadc07314","Type":"ContainerStarted","Data":"1788b041ef41d5005414c54df6449a1eda3414d82419d64bb5e01e62f2629011"}
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.568727 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl" event={"ID":"edc67f7a-d508-4d46-b845-353aadc07314","Type":"ContainerStarted","Data":"7be72a5ba1ab9edf17c51701d10a8297366926938a7672f1f9f156e12cbf3405"}
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.568916 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.572849 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.582532 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xx4rl" podStartSLOduration=2.582511557 podStartE2EDuration="2.582511557s" podCreationTimestamp="2026-02-16 13:08:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:08:50.579760593 +0000 UTC m=+329.906474321" watchObservedRunningTime="2026-02-16 13:08:50.582511557 +0000 UTC m=+329.909225285"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.645154 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.654253 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:50 crc kubenswrapper[4816]: I0216 13:08:50.895614 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"]
Feb 16 13:08:50 crc kubenswrapper[4816]: W0216 13:08:50.908876 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod580eb17f_33b5_4834_bc26_815d5c54dbae.slice/crio-21aee6e7c3b86a88697b62837cc2b9574e126e6cd6edad69a84c73d36c11ab32 WatchSource:0}: Error finding container 21aee6e7c3b86a88697b62837cc2b9574e126e6cd6edad69a84c73d36c11ab32: Status 404 returned error can't find the container with id 21aee6e7c3b86a88697b62837cc2b9574e126e6cd6edad69a84c73d36c11ab32
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.061423 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-565f96c579-24629"]
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.409748 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78062d6d-e0f0-4659-add1-a4a4ea464c6f" path="/var/lib/kubelet/pods/78062d6d-e0f0-4659-add1-a4a4ea464c6f/volumes"
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.410604 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f157dee-0cab-452b-8f32-1fd9e25ecd59" path="/var/lib/kubelet/pods/8f157dee-0cab-452b-8f32-1fd9e25ecd59/volumes"
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.573922 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" event={"ID":"580eb17f-33b5-4834-bc26-815d5c54dbae","Type":"ContainerStarted","Data":"92dbe3cf610dafb9114434dda500c5762160297a0bf428753decea5484796cd2"}
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.573973 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" event={"ID":"580eb17f-33b5-4834-bc26-815d5c54dbae","Type":"ContainerStarted","Data":"21aee6e7c3b86a88697b62837cc2b9574e126e6cd6edad69a84c73d36c11ab32"}
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.574163 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.575463 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-565f96c579-24629" event={"ID":"5b06a7e4-cd64-44d0-9996-26abff878c64","Type":"ContainerStarted","Data":"026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159"}
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.575502 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-565f96c579-24629" event={"ID":"5b06a7e4-cd64-44d0-9996-26abff878c64","Type":"ContainerStarted","Data":"2fb5400816de554f686d996ca73b57faa73e63523b22251b9e16f9b2ebb978a4"}
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.582577 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.649772 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" podStartSLOduration=2.649756225 podStartE2EDuration="2.649756225s" podCreationTimestamp="2026-02-16 13:08:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:08:51.609782801 +0000 UTC m=+330.936496529" watchObservedRunningTime="2026-02-16 13:08:51.649756225 +0000 UTC m=+330.976469953"
Feb 16 13:08:51 crc kubenswrapper[4816]: I0216 13:08:51.668581 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-565f96c579-24629" podStartSLOduration=2.66856143 podStartE2EDuration="2.66856143s" podCreationTimestamp="2026-02-16 13:08:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:08:51.666253838 +0000 UTC m=+330.992967576" watchObservedRunningTime="2026-02-16 13:08:51.66856143 +0000 UTC m=+330.995275168"
Feb 16 13:08:52 crc kubenswrapper[4816]: I0216 13:08:52.580613 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:52 crc kubenswrapper[4816]: I0216 13:08:52.584418 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-565f96c579-24629"
Feb 16 13:08:55 crc kubenswrapper[4816]: E0216 13:08:55.010457 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice/crio-63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19812a43_b6b0_45dc_9c35_b3c787fd4ff1.slice\": RecentStats: unable to find data in memory cache]"
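Note: in the pod_startup_latency_tracker entries above, podStartE2EDuration appears to be the watch-observed running time minus podCreationTimestamp (2.582511557s for the marketplace operator: 13:08:50.582511557 minus 13:08:48). A quick check of that arithmetic in Go, using the timestamps exactly as logged; the interpretation of which field feeds the metric is an assumption from the numbers, not from kubelet source:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the marketplace-operator startup-duration entry above.
	created, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-16 13:08:48 +0000 UTC")
	running, _ := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", "2026-02-16 13:08:50.582511557 +0000 UTC")
	fmt.Println(running.Sub(created)) // prints 2.582511557s, matching podStartE2EDuration
}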
"Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19812a43_b6b0_45dc_9c35_b3c787fd4ff1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice/crio-63787e5e789cb811f7cb86423b6533db08173557e9c7ea9767d150f5d2bd1635\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.259210 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"] Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.259803 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" podUID="580eb17f-33b5-4834-bc26-815d5c54dbae" containerName="route-controller-manager" containerID="cri-o://92dbe3cf610dafb9114434dda500c5762160297a0bf428753decea5484796cd2" gracePeriod=30 Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.677626 4816 generic.go:334] "Generic (PLEG): container finished" podID="580eb17f-33b5-4834-bc26-815d5c54dbae" containerID="92dbe3cf610dafb9114434dda500c5762160297a0bf428753decea5484796cd2" exitCode=0 Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.677721 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" event={"ID":"580eb17f-33b5-4834-bc26-815d5c54dbae","Type":"ContainerDied","Data":"92dbe3cf610dafb9114434dda500c5762160297a0bf428753decea5484796cd2"} Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.677992 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" event={"ID":"580eb17f-33b5-4834-bc26-815d5c54dbae","Type":"ContainerDied","Data":"21aee6e7c3b86a88697b62837cc2b9574e126e6cd6edad69a84c73d36c11ab32"} Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.678014 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21aee6e7c3b86a88697b62837cc2b9574e126e6cd6edad69a84c73d36c11ab32" Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.707416 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.898935 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/580eb17f-33b5-4834-bc26-815d5c54dbae-serving-cert\") pod \"580eb17f-33b5-4834-bc26-815d5c54dbae\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.899029 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-client-ca\") pod \"580eb17f-33b5-4834-bc26-815d5c54dbae\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.899081 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjkl7\" (UniqueName: \"kubernetes.io/projected/580eb17f-33b5-4834-bc26-815d5c54dbae-kube-api-access-pjkl7\") pod \"580eb17f-33b5-4834-bc26-815d5c54dbae\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.899129 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-config\") pod \"580eb17f-33b5-4834-bc26-815d5c54dbae\" (UID: \"580eb17f-33b5-4834-bc26-815d5c54dbae\") " Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.899878 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-config" (OuterVolumeSpecName: "config") pod "580eb17f-33b5-4834-bc26-815d5c54dbae" (UID: "580eb17f-33b5-4834-bc26-815d5c54dbae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.899990 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-client-ca" (OuterVolumeSpecName: "client-ca") pod "580eb17f-33b5-4834-bc26-815d5c54dbae" (UID: "580eb17f-33b5-4834-bc26-815d5c54dbae"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.904606 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/580eb17f-33b5-4834-bc26-815d5c54dbae-kube-api-access-pjkl7" (OuterVolumeSpecName: "kube-api-access-pjkl7") pod "580eb17f-33b5-4834-bc26-815d5c54dbae" (UID: "580eb17f-33b5-4834-bc26-815d5c54dbae"). InnerVolumeSpecName "kube-api-access-pjkl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:09:09 crc kubenswrapper[4816]: I0216 13:09:09.905333 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/580eb17f-33b5-4834-bc26-815d5c54dbae-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "580eb17f-33b5-4834-bc26-815d5c54dbae" (UID: "580eb17f-33b5-4834-bc26-815d5c54dbae"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.000248 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.000316 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/580eb17f-33b5-4834-bc26-815d5c54dbae-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.000337 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/580eb17f-33b5-4834-bc26-815d5c54dbae-client-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.000355 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjkl7\" (UniqueName: \"kubernetes.io/projected/580eb17f-33b5-4834-bc26-815d5c54dbae-kube-api-access-pjkl7\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.345872 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj"] Feb 16 13:09:10 crc kubenswrapper[4816]: E0216 13:09:10.346449 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="580eb17f-33b5-4834-bc26-815d5c54dbae" containerName="route-controller-manager" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.346469 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="580eb17f-33b5-4834-bc26-815d5c54dbae" containerName="route-controller-manager" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.346640 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="580eb17f-33b5-4834-bc26-815d5c54dbae" containerName="route-controller-manager" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.347287 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.356513 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj"] Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.507019 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4159af3b-7f12-4af1-bc25-c9ebee632c27-client-ca\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.507074 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4159af3b-7f12-4af1-bc25-c9ebee632c27-config\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.507110 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnzv4\" (UniqueName: \"kubernetes.io/projected/4159af3b-7f12-4af1-bc25-c9ebee632c27-kube-api-access-mnzv4\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.507135 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4159af3b-7f12-4af1-bc25-c9ebee632c27-serving-cert\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.608381 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4159af3b-7f12-4af1-bc25-c9ebee632c27-client-ca\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.608437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4159af3b-7f12-4af1-bc25-c9ebee632c27-config\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.608484 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnzv4\" (UniqueName: \"kubernetes.io/projected/4159af3b-7f12-4af1-bc25-c9ebee632c27-kube-api-access-mnzv4\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.608518 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4159af3b-7f12-4af1-bc25-c9ebee632c27-serving-cert\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.609273 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4159af3b-7f12-4af1-bc25-c9ebee632c27-client-ca\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.609699 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4159af3b-7f12-4af1-bc25-c9ebee632c27-config\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.614676 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4159af3b-7f12-4af1-bc25-c9ebee632c27-serving-cert\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.631406 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnzv4\" (UniqueName: \"kubernetes.io/projected/4159af3b-7f12-4af1-bc25-c9ebee632c27-kube-api-access-mnzv4\") pod \"route-controller-manager-66f8849bc9-wz5jj\" (UID: \"4159af3b-7f12-4af1-bc25-c9ebee632c27\") " pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.661963 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.684064 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59" Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.722892 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"] Feb 16 13:09:10 crc kubenswrapper[4816]: I0216 13:09:10.729967 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59988bbdc9-q5g59"] Feb 16 13:09:11 crc kubenswrapper[4816]: I0216 13:09:11.066104 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj"] Feb 16 13:09:11 crc kubenswrapper[4816]: W0216 13:09:11.080710 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4159af3b_7f12_4af1_bc25_c9ebee632c27.slice/crio-70f0f5a70a5322c4e9f7378107be7f1423ca9084f34d3f0e15c931a60f3b3fff WatchSource:0}: Error finding container 70f0f5a70a5322c4e9f7378107be7f1423ca9084f34d3f0e15c931a60f3b3fff: Status 404 returned error can't find the container with id 70f0f5a70a5322c4e9f7378107be7f1423ca9084f34d3f0e15c931a60f3b3fff Feb 16 13:09:11 crc kubenswrapper[4816]: I0216 13:09:11.406041 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="580eb17f-33b5-4834-bc26-815d5c54dbae" path="/var/lib/kubelet/pods/580eb17f-33b5-4834-bc26-815d5c54dbae/volumes" Feb 16 13:09:11 crc kubenswrapper[4816]: I0216 13:09:11.688633 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" event={"ID":"4159af3b-7f12-4af1-bc25-c9ebee632c27","Type":"ContainerStarted","Data":"4e7f95b81ca5613cc6ab4d26a6fc3d24718253baec3b33cb01d255c6780f6fe3"} Feb 16 13:09:11 crc kubenswrapper[4816]: I0216 13:09:11.688695 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" event={"ID":"4159af3b-7f12-4af1-bc25-c9ebee632c27","Type":"ContainerStarted","Data":"70f0f5a70a5322c4e9f7378107be7f1423ca9084f34d3f0e15c931a60f3b3fff"} Feb 16 13:09:11 crc kubenswrapper[4816]: I0216 13:09:11.688881 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:11 crc kubenswrapper[4816]: I0216 13:09:11.693687 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" Feb 16 13:09:11 crc kubenswrapper[4816]: I0216 13:09:11.703263 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-66f8849bc9-wz5jj" podStartSLOduration=2.7032506659999997 podStartE2EDuration="2.703250666s" podCreationTimestamp="2026-02-16 13:09:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:09:11.702470995 +0000 UTC m=+351.029184733" watchObservedRunningTime="2026-02-16 13:09:11.703250666 +0000 UTC m=+351.029964394" Feb 16 13:09:15 crc kubenswrapper[4816]: E0216 13:09:15.260372 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode359a80a_04b9_4544_85b7_b51c74bbef61.slice\": 
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.072242 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tcn99"]
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.073583 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.076277 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.092751 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcn99"]
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.198548 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtqs6\" (UniqueName: \"kubernetes.io/projected/2b0e4282-5491-447a-ad48-d13db1ea995d-kube-api-access-xtqs6\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.198624 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b0e4282-5491-447a-ad48-d13db1ea995d-utilities\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.198774 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b0e4282-5491-447a-ad48-d13db1ea995d-catalog-content\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.272746 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rtqdh"]
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.275153 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.280095 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.280364 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rtqdh"]
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.299671 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b0e4282-5491-447a-ad48-d13db1ea995d-catalog-content\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.299798 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtqs6\" (UniqueName: \"kubernetes.io/projected/2b0e4282-5491-447a-ad48-d13db1ea995d-kube-api-access-xtqs6\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.299842 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b0e4282-5491-447a-ad48-d13db1ea995d-utilities\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.300878 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b0e4282-5491-447a-ad48-d13db1ea995d-catalog-content\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.300966 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b0e4282-5491-447a-ad48-d13db1ea995d-utilities\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.316463 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtqs6\" (UniqueName: \"kubernetes.io/projected/2b0e4282-5491-447a-ad48-d13db1ea995d-kube-api-access-xtqs6\") pod \"redhat-marketplace-tcn99\" (UID: \"2b0e4282-5491-447a-ad48-d13db1ea995d\") " pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.393578 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tcn99"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.400335 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw2pz\" (UniqueName: \"kubernetes.io/projected/f61f8b10-6688-4ecf-a8fb-110be20f0314-kube-api-access-jw2pz\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.400399 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61f8b10-6688-4ecf-a8fb-110be20f0314-catalog-content\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.400450 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61f8b10-6688-4ecf-a8fb-110be20f0314-utilities\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.501694 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61f8b10-6688-4ecf-a8fb-110be20f0314-catalog-content\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.501796 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61f8b10-6688-4ecf-a8fb-110be20f0314-utilities\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.501936 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw2pz\" (UniqueName: \"kubernetes.io/projected/f61f8b10-6688-4ecf-a8fb-110be20f0314-kube-api-access-jw2pz\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.503297 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61f8b10-6688-4ecf-a8fb-110be20f0314-utilities\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.503320 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61f8b10-6688-4ecf-a8fb-110be20f0314-catalog-content\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.519859 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw2pz\" (UniqueName: \"kubernetes.io/projected/f61f8b10-6688-4ecf-a8fb-110be20f0314-kube-api-access-jw2pz\") pod \"redhat-operators-rtqdh\" (UID: \"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh"
\"f61f8b10-6688-4ecf-a8fb-110be20f0314\") " pod="openshift-marketplace/redhat-operators-rtqdh" Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.598011 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtqdh" Feb 16 13:09:26 crc kubenswrapper[4816]: I0216 13:09:26.766917 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tcn99"] Feb 16 13:09:26 crc kubenswrapper[4816]: W0216 13:09:26.778065 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b0e4282_5491_447a_ad48_d13db1ea995d.slice/crio-e279ddb7977525f015ca23c478396bb4198148d86442a6ed08ecfb59b6688d62 WatchSource:0}: Error finding container e279ddb7977525f015ca23c478396bb4198148d86442a6ed08ecfb59b6688d62: Status 404 returned error can't find the container with id e279ddb7977525f015ca23c478396bb4198148d86442a6ed08ecfb59b6688d62 Feb 16 13:09:27 crc kubenswrapper[4816]: I0216 13:09:27.027456 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rtqdh"] Feb 16 13:09:27 crc kubenswrapper[4816]: W0216 13:09:27.031197 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf61f8b10_6688_4ecf_a8fb_110be20f0314.slice/crio-94be9d678ab00900e1731cfc639c5de83caab0ee5e02c22faf379d98524849f9 WatchSource:0}: Error finding container 94be9d678ab00900e1731cfc639c5de83caab0ee5e02c22faf379d98524849f9: Status 404 returned error can't find the container with id 94be9d678ab00900e1731cfc639c5de83caab0ee5e02c22faf379d98524849f9 Feb 16 13:09:27 crc kubenswrapper[4816]: I0216 13:09:27.778893 4816 generic.go:334] "Generic (PLEG): container finished" podID="2b0e4282-5491-447a-ad48-d13db1ea995d" containerID="975fde5e54e53d8a9f7653aed5cdadc23b0e98f9118ad80335ae3e3074bdd47a" exitCode=0 Feb 16 13:09:27 crc kubenswrapper[4816]: I0216 13:09:27.778995 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcn99" event={"ID":"2b0e4282-5491-447a-ad48-d13db1ea995d","Type":"ContainerDied","Data":"975fde5e54e53d8a9f7653aed5cdadc23b0e98f9118ad80335ae3e3074bdd47a"} Feb 16 13:09:27 crc kubenswrapper[4816]: I0216 13:09:27.779274 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcn99" event={"ID":"2b0e4282-5491-447a-ad48-d13db1ea995d","Type":"ContainerStarted","Data":"e279ddb7977525f015ca23c478396bb4198148d86442a6ed08ecfb59b6688d62"} Feb 16 13:09:27 crc kubenswrapper[4816]: I0216 13:09:27.781670 4816 generic.go:334] "Generic (PLEG): container finished" podID="f61f8b10-6688-4ecf-a8fb-110be20f0314" containerID="1e911cbe8594fec5c38d2909470aebd990d7336b92f9a1d66f8391f179e57724" exitCode=0 Feb 16 13:09:27 crc kubenswrapper[4816]: I0216 13:09:27.781736 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtqdh" event={"ID":"f61f8b10-6688-4ecf-a8fb-110be20f0314","Type":"ContainerDied","Data":"1e911cbe8594fec5c38d2909470aebd990d7336b92f9a1d66f8391f179e57724"} Feb 16 13:09:27 crc kubenswrapper[4816]: I0216 13:09:27.781774 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtqdh" event={"ID":"f61f8b10-6688-4ecf-a8fb-110be20f0314","Type":"ContainerStarted","Data":"94be9d678ab00900e1731cfc639c5de83caab0ee5e02c22faf379d98524849f9"} Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 
13:09:28.473822 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9cbbd"] Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.474810 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.476252 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.488507 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9cbbd"] Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.627169 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/286af964-57b7-4758-807c-14cc3d67f1e9-utilities\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.627204 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfvht\" (UniqueName: \"kubernetes.io/projected/286af964-57b7-4758-807c-14cc3d67f1e9-kube-api-access-rfvht\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.627226 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/286af964-57b7-4758-807c-14cc3d67f1e9-catalog-content\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.669229 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-krkwb"] Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.670189 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.674996 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.679287 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-krkwb"] Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.728859 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/286af964-57b7-4758-807c-14cc3d67f1e9-utilities\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.729150 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfvht\" (UniqueName: \"kubernetes.io/projected/286af964-57b7-4758-807c-14cc3d67f1e9-kube-api-access-rfvht\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.729434 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/286af964-57b7-4758-807c-14cc3d67f1e9-catalog-content\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.729268 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/286af964-57b7-4758-807c-14cc3d67f1e9-utilities\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.729712 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/286af964-57b7-4758-807c-14cc3d67f1e9-catalog-content\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.750942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfvht\" (UniqueName: \"kubernetes.io/projected/286af964-57b7-4758-807c-14cc3d67f1e9-kube-api-access-rfvht\") pod \"certified-operators-9cbbd\" (UID: \"286af964-57b7-4758-807c-14cc3d67f1e9\") " pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.789077 4816 generic.go:334] "Generic (PLEG): container finished" podID="2b0e4282-5491-447a-ad48-d13db1ea995d" containerID="2680dea97bba86e174a242e3f3352931b4863eb85df7d58c81595de1b1f913d5" exitCode=0 Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.789140 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcn99" event={"ID":"2b0e4282-5491-447a-ad48-d13db1ea995d","Type":"ContainerDied","Data":"2680dea97bba86e174a242e3f3352931b4863eb85df7d58c81595de1b1f913d5"} Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.791268 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtqdh" 
event={"ID":"f61f8b10-6688-4ecf-a8fb-110be20f0314","Type":"ContainerStarted","Data":"a6abb520360d1e2429f71d724556c403df36eff499bb70b7f536b7aad977c914"} Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.805174 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.830286 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-catalog-content\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.830322 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwgvk\" (UniqueName: \"kubernetes.io/projected/d2519664-9d4a-43d0-847e-ffdb49a03d02-kube-api-access-gwgvk\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.830370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-utilities\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.932379 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-catalog-content\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.932441 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwgvk\" (UniqueName: \"kubernetes.io/projected/d2519664-9d4a-43d0-847e-ffdb49a03d02-kube-api-access-gwgvk\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.932493 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-utilities\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.933039 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-catalog-content\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.933246 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-utilities\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 
16 13:09:28 crc kubenswrapper[4816]: I0216 13:09:28.952871 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwgvk\" (UniqueName: \"kubernetes.io/projected/d2519664-9d4a-43d0-847e-ffdb49a03d02-kube-api-access-gwgvk\") pod \"community-operators-krkwb\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.046521 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.196132 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9cbbd"] Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.462195 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-krkwb"] Feb 16 13:09:29 crc kubenswrapper[4816]: W0216 13:09:29.485023 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2519664_9d4a_43d0_847e_ffdb49a03d02.slice/crio-0715673ea8133207bac679caaf2490360f37f4eced02e377811ab167b0be4e1d WatchSource:0}: Error finding container 0715673ea8133207bac679caaf2490360f37f4eced02e377811ab167b0be4e1d: Status 404 returned error can't find the container with id 0715673ea8133207bac679caaf2490360f37f4eced02e377811ab167b0be4e1d Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.632204 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fjbzt"] Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.633384 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.644131 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fjbzt"] Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.729612 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-565f96c579-24629"] Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.729800 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-565f96c579-24629" podUID="5b06a7e4-cd64-44d0-9996-26abff878c64" containerName="controller-manager" containerID="cri-o://026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159" gracePeriod=30 Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744141 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/232ef8fd-ca40-40b9-ba27-4918dba2edc0-registry-certificates\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744193 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-registry-tls\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744225 
4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744356 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-bound-sa-token\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744443 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/232ef8fd-ca40-40b9-ba27-4918dba2edc0-trusted-ca\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744488 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/232ef8fd-ca40-40b9-ba27-4918dba2edc0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744513 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46csm\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-kube-api-access-46csm\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.744626 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/232ef8fd-ca40-40b9-ba27-4918dba2edc0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.775141 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.796953 4816 generic.go:334] "Generic (PLEG): container finished" podID="286af964-57b7-4758-807c-14cc3d67f1e9" containerID="97a2069d8fc2db62fb2917cd8c78baf8625f3d8b5f23e6d6751f657bf685e739" exitCode=0 Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.796997 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9cbbd" 
event={"ID":"286af964-57b7-4758-807c-14cc3d67f1e9","Type":"ContainerDied","Data":"97a2069d8fc2db62fb2917cd8c78baf8625f3d8b5f23e6d6751f657bf685e739"} Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.797043 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9cbbd" event={"ID":"286af964-57b7-4758-807c-14cc3d67f1e9","Type":"ContainerStarted","Data":"166ee2202cd35d28336fa5fc9de69ccca2fa5d8fb82cc1cd1fda9fa025082459"} Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.798879 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tcn99" event={"ID":"2b0e4282-5491-447a-ad48-d13db1ea995d","Type":"ContainerStarted","Data":"5ccf966b47b381c6b860a941826a373dc68b1e37352b68a2a6d59b378c782a35"} Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.802006 4816 generic.go:334] "Generic (PLEG): container finished" podID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerID="1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a" exitCode=0 Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.802092 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkwb" event={"ID":"d2519664-9d4a-43d0-847e-ffdb49a03d02","Type":"ContainerDied","Data":"1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a"} Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.802122 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkwb" event={"ID":"d2519664-9d4a-43d0-847e-ffdb49a03d02","Type":"ContainerStarted","Data":"0715673ea8133207bac679caaf2490360f37f4eced02e377811ab167b0be4e1d"} Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.805425 4816 generic.go:334] "Generic (PLEG): container finished" podID="f61f8b10-6688-4ecf-a8fb-110be20f0314" containerID="a6abb520360d1e2429f71d724556c403df36eff499bb70b7f536b7aad977c914" exitCode=0 Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.805471 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtqdh" event={"ID":"f61f8b10-6688-4ecf-a8fb-110be20f0314","Type":"ContainerDied","Data":"a6abb520360d1e2429f71d724556c403df36eff499bb70b7f536b7aad977c914"} Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.841020 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tcn99" podStartSLOduration=2.435836735 podStartE2EDuration="3.840998993s" podCreationTimestamp="2026-02-16 13:09:26 +0000 UTC" firstStartedPulling="2026-02-16 13:09:27.781646527 +0000 UTC m=+367.108360275" lastFinishedPulling="2026-02-16 13:09:29.186808805 +0000 UTC m=+368.513522533" observedRunningTime="2026-02-16 13:09:29.8386433 +0000 UTC m=+369.165357028" watchObservedRunningTime="2026-02-16 13:09:29.840998993 +0000 UTC m=+369.167712731" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845286 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/232ef8fd-ca40-40b9-ba27-4918dba2edc0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845338 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/232ef8fd-ca40-40b9-ba27-4918dba2edc0-registry-certificates\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845369 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-registry-tls\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845407 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-bound-sa-token\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845454 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/232ef8fd-ca40-40b9-ba27-4918dba2edc0-trusted-ca\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845482 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/232ef8fd-ca40-40b9-ba27-4918dba2edc0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845506 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46csm\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-kube-api-access-46csm\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.845785 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/232ef8fd-ca40-40b9-ba27-4918dba2edc0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.846670 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/232ef8fd-ca40-40b9-ba27-4918dba2edc0-registry-certificates\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.846712 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/232ef8fd-ca40-40b9-ba27-4918dba2edc0-trusted-ca\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc 
kubenswrapper[4816]: I0216 13:09:29.849684 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-registry-tls\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.855247 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/232ef8fd-ca40-40b9-ba27-4918dba2edc0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.862388 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46csm\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-kube-api-access-46csm\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:29 crc kubenswrapper[4816]: I0216 13:09:29.862690 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/232ef8fd-ca40-40b9-ba27-4918dba2edc0-bound-sa-token\") pod \"image-registry-66df7c8f76-fjbzt\" (UID: \"232ef8fd-ca40-40b9-ba27-4918dba2edc0\") " pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.024145 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.080630 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-565f96c579-24629" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.208366 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fjbzt"] Feb 16 13:09:30 crc kubenswrapper[4816]: W0216 13:09:30.219547 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod232ef8fd_ca40_40b9_ba27_4918dba2edc0.slice/crio-484322de10125b6efd088e6cbb238e0dd5f38d99abb7feecc70bfadc9d7e5aca WatchSource:0}: Error finding container 484322de10125b6efd088e6cbb238e0dd5f38d99abb7feecc70bfadc9d7e5aca: Status 404 returned error can't find the container with id 484322de10125b6efd088e6cbb238e0dd5f38d99abb7feecc70bfadc9d7e5aca Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.250668 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdvmg\" (UniqueName: \"kubernetes.io/projected/5b06a7e4-cd64-44d0-9996-26abff878c64-kube-api-access-cdvmg\") pod \"5b06a7e4-cd64-44d0-9996-26abff878c64\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.250745 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-config\") pod \"5b06a7e4-cd64-44d0-9996-26abff878c64\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.250794 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-proxy-ca-bundles\") pod \"5b06a7e4-cd64-44d0-9996-26abff878c64\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.250835 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b06a7e4-cd64-44d0-9996-26abff878c64-serving-cert\") pod \"5b06a7e4-cd64-44d0-9996-26abff878c64\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.250902 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-client-ca\") pod \"5b06a7e4-cd64-44d0-9996-26abff878c64\" (UID: \"5b06a7e4-cd64-44d0-9996-26abff878c64\") " Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.251701 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-client-ca" (OuterVolumeSpecName: "client-ca") pod "5b06a7e4-cd64-44d0-9996-26abff878c64" (UID: "5b06a7e4-cd64-44d0-9996-26abff878c64"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.251867 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5b06a7e4-cd64-44d0-9996-26abff878c64" (UID: "5b06a7e4-cd64-44d0-9996-26abff878c64"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.252414 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-config" (OuterVolumeSpecName: "config") pod "5b06a7e4-cd64-44d0-9996-26abff878c64" (UID: "5b06a7e4-cd64-44d0-9996-26abff878c64"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.255961 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b06a7e4-cd64-44d0-9996-26abff878c64-kube-api-access-cdvmg" (OuterVolumeSpecName: "kube-api-access-cdvmg") pod "5b06a7e4-cd64-44d0-9996-26abff878c64" (UID: "5b06a7e4-cd64-44d0-9996-26abff878c64"). InnerVolumeSpecName "kube-api-access-cdvmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.256075 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b06a7e4-cd64-44d0-9996-26abff878c64-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5b06a7e4-cd64-44d0-9996-26abff878c64" (UID: "5b06a7e4-cd64-44d0-9996-26abff878c64"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.352535 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdvmg\" (UniqueName: \"kubernetes.io/projected/5b06a7e4-cd64-44d0-9996-26abff878c64-kube-api-access-cdvmg\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.352568 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.352578 4816 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.352586 4816 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b06a7e4-cd64-44d0-9996-26abff878c64-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.352595 4816 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b06a7e4-cd64-44d0-9996-26abff878c64-client-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.812553 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtqdh" event={"ID":"f61f8b10-6688-4ecf-a8fb-110be20f0314","Type":"ContainerStarted","Data":"c7cbec6a3b5bd10b20d5ee5ebbb3e1b6f69c0ac53724aee027702f2ef40900ee"} Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.814169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9cbbd" event={"ID":"286af964-57b7-4758-807c-14cc3d67f1e9","Type":"ContainerStarted","Data":"548e5703a2dcf1fe304581b78a6d141e01954a12566de9e6fea67abe5eb191b2"} Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.815683 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkwb" 
event={"ID":"d2519664-9d4a-43d0-847e-ffdb49a03d02","Type":"ContainerStarted","Data":"7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab"} Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.816807 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" event={"ID":"232ef8fd-ca40-40b9-ba27-4918dba2edc0","Type":"ContainerStarted","Data":"a474fc74733ef5ddf8b78516ae5fef8a10c6a1d9ff492cd6affa6d7d8bb220d1"} Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.816836 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" event={"ID":"232ef8fd-ca40-40b9-ba27-4918dba2edc0","Type":"ContainerStarted","Data":"484322de10125b6efd088e6cbb238e0dd5f38d99abb7feecc70bfadc9d7e5aca"} Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.816889 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.825606 4816 generic.go:334] "Generic (PLEG): container finished" podID="5b06a7e4-cd64-44d0-9996-26abff878c64" containerID="026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159" exitCode=0 Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.825695 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-565f96c579-24629" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.825688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-565f96c579-24629" event={"ID":"5b06a7e4-cd64-44d0-9996-26abff878c64","Type":"ContainerDied","Data":"026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159"} Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.825881 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-565f96c579-24629" event={"ID":"5b06a7e4-cd64-44d0-9996-26abff878c64","Type":"ContainerDied","Data":"2fb5400816de554f686d996ca73b57faa73e63523b22251b9e16f9b2ebb978a4"} Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.825943 4816 scope.go:117] "RemoveContainer" containerID="026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.842265 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rtqdh" podStartSLOduration=2.182168699 podStartE2EDuration="4.842248987s" podCreationTimestamp="2026-02-16 13:09:26 +0000 UTC" firstStartedPulling="2026-02-16 13:09:27.783071965 +0000 UTC m=+367.109785713" lastFinishedPulling="2026-02-16 13:09:30.443152273 +0000 UTC m=+369.769866001" observedRunningTime="2026-02-16 13:09:30.838795085 +0000 UTC m=+370.165508823" watchObservedRunningTime="2026-02-16 13:09:30.842248987 +0000 UTC m=+370.168962715" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.857904 4816 scope.go:117] "RemoveContainer" containerID="026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159" Feb 16 13:09:30 crc kubenswrapper[4816]: E0216 13:09:30.859023 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159\": container with ID starting with 026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159 not found: ID does not exist" 
containerID="026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.859058 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159"} err="failed to get container status \"026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159\": rpc error: code = NotFound desc = could not find container \"026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159\": container with ID starting with 026a5200ed88c2a58cf522d7381308f8347ed1124b6b6d9505130f1bb8c96159 not found: ID does not exist" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.891170 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" podStartSLOduration=1.891150311 podStartE2EDuration="1.891150311s" podCreationTimestamp="2026-02-16 13:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:09:30.889888847 +0000 UTC m=+370.216602595" watchObservedRunningTime="2026-02-16 13:09:30.891150311 +0000 UTC m=+370.217864039" Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.905572 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-565f96c579-24629"] Feb 16 13:09:30 crc kubenswrapper[4816]: I0216 13:09:30.915963 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-565f96c579-24629"] Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.406014 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b06a7e4-cd64-44d0-9996-26abff878c64" path="/var/lib/kubelet/pods/5b06a7e4-cd64-44d0-9996-26abff878c64/volumes" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.593512 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-658758c95c-lrq4q"] Feb 16 13:09:31 crc kubenswrapper[4816]: E0216 13:09:31.593792 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b06a7e4-cd64-44d0-9996-26abff878c64" containerName="controller-manager" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.593808 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b06a7e4-cd64-44d0-9996-26abff878c64" containerName="controller-manager" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.593919 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b06a7e4-cd64-44d0-9996-26abff878c64" containerName="controller-manager" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.594317 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.598224 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.598330 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.598441 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.598470 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.598804 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.599052 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.605136 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.619853 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-658758c95c-lrq4q"] Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.666748 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-proxy-ca-bundles\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.666795 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd8g5\" (UniqueName: \"kubernetes.io/projected/266db132-d641-4b22-b4fe-250167a0493b-kube-api-access-qd8g5\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.666823 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-client-ca\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.666888 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-config\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.667016 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/266db132-d641-4b22-b4fe-250167a0493b-serving-cert\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.768612 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/266db132-d641-4b22-b4fe-250167a0493b-serving-cert\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.768687 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-proxy-ca-bundles\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.768712 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd8g5\" (UniqueName: \"kubernetes.io/projected/266db132-d641-4b22-b4fe-250167a0493b-kube-api-access-qd8g5\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.768734 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-client-ca\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.768783 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-config\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.770064 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-client-ca\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.770176 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-proxy-ca-bundles\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.770440 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/266db132-d641-4b22-b4fe-250167a0493b-config\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " 
pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.774438 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/266db132-d641-4b22-b4fe-250167a0493b-serving-cert\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.787288 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd8g5\" (UniqueName: \"kubernetes.io/projected/266db132-d641-4b22-b4fe-250167a0493b-kube-api-access-qd8g5\") pod \"controller-manager-658758c95c-lrq4q\" (UID: \"266db132-d641-4b22-b4fe-250167a0493b\") " pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.833308 4816 generic.go:334] "Generic (PLEG): container finished" podID="286af964-57b7-4758-807c-14cc3d67f1e9" containerID="548e5703a2dcf1fe304581b78a6d141e01954a12566de9e6fea67abe5eb191b2" exitCode=0 Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.833602 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9cbbd" event={"ID":"286af964-57b7-4758-807c-14cc3d67f1e9","Type":"ContainerDied","Data":"548e5703a2dcf1fe304581b78a6d141e01954a12566de9e6fea67abe5eb191b2"} Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.835314 4816 generic.go:334] "Generic (PLEG): container finished" podID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerID="7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab" exitCode=0 Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.835756 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkwb" event={"ID":"d2519664-9d4a-43d0-847e-ffdb49a03d02","Type":"ContainerDied","Data":"7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab"} Feb 16 13:09:31 crc kubenswrapper[4816]: I0216 13:09:31.915844 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.322262 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-658758c95c-lrq4q"] Feb 16 13:09:32 crc kubenswrapper[4816]: W0216 13:09:32.344498 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod266db132_d641_4b22_b4fe_250167a0493b.slice/crio-c1e44e7c7578736b0b25282185de95ea8148c1de4007aaeea4355d741b6ec570 WatchSource:0}: Error finding container c1e44e7c7578736b0b25282185de95ea8148c1de4007aaeea4355d741b6ec570: Status 404 returned error can't find the container with id c1e44e7c7578736b0b25282185de95ea8148c1de4007aaeea4355d741b6ec570 Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.842420 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" event={"ID":"266db132-d641-4b22-b4fe-250167a0493b","Type":"ContainerStarted","Data":"575839094cfb6414a548c7637a4670e866af30beac98917c2e5214924c4d9141"} Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.842731 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" event={"ID":"266db132-d641-4b22-b4fe-250167a0493b","Type":"ContainerStarted","Data":"c1e44e7c7578736b0b25282185de95ea8148c1de4007aaeea4355d741b6ec570"} Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.843867 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.846751 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9cbbd" event={"ID":"286af964-57b7-4758-807c-14cc3d67f1e9","Type":"ContainerStarted","Data":"a651936d12ab028f94183b80d8284053fd363922a3d3ecc66e9a5862e9bf1d04"} Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.849483 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkwb" event={"ID":"d2519664-9d4a-43d0-847e-ffdb49a03d02","Type":"ContainerStarted","Data":"c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8"} Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.855636 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.870617 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-658758c95c-lrq4q" podStartSLOduration=3.87059549 podStartE2EDuration="3.87059549s" podCreationTimestamp="2026-02-16 13:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:09:32.865128893 +0000 UTC m=+372.191842641" watchObservedRunningTime="2026-02-16 13:09:32.87059549 +0000 UTC m=+372.197309228" Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.922505 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9cbbd" podStartSLOduration=2.449334489 podStartE2EDuration="4.922482464s" podCreationTimestamp="2026-02-16 13:09:28 +0000 UTC" firstStartedPulling="2026-02-16 13:09:29.798280285 +0000 UTC m=+369.124994013" 
lastFinishedPulling="2026-02-16 13:09:32.27142826 +0000 UTC m=+371.598141988" observedRunningTime="2026-02-16 13:09:32.919408741 +0000 UTC m=+372.246122479" watchObservedRunningTime="2026-02-16 13:09:32.922482464 +0000 UTC m=+372.249196192" Feb 16 13:09:32 crc kubenswrapper[4816]: I0216 13:09:32.938523 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-krkwb" podStartSLOduration=2.297771877 podStartE2EDuration="4.938501955s" podCreationTimestamp="2026-02-16 13:09:28 +0000 UTC" firstStartedPulling="2026-02-16 13:09:29.804158273 +0000 UTC m=+369.130872001" lastFinishedPulling="2026-02-16 13:09:32.444888341 +0000 UTC m=+371.771602079" observedRunningTime="2026-02-16 13:09:32.935789082 +0000 UTC m=+372.262502810" watchObservedRunningTime="2026-02-16 13:09:32.938501955 +0000 UTC m=+372.265215693" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.394374 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tcn99" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.395940 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tcn99" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.439734 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tcn99" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.598761 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rtqdh" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.598925 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rtqdh" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.639823 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rtqdh" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.914122 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rtqdh" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.914303 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tcn99" Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.941455 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:09:36 crc kubenswrapper[4816]: I0216 13:09:36.941535 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:09:38 crc kubenswrapper[4816]: I0216 13:09:38.809501 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:38 crc kubenswrapper[4816]: I0216 13:09:38.809601 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:38 crc kubenswrapper[4816]: I0216 
13:09:38.873711 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:38 crc kubenswrapper[4816]: I0216 13:09:38.927180 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9cbbd" Feb 16 13:09:39 crc kubenswrapper[4816]: I0216 13:09:39.046748 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:39 crc kubenswrapper[4816]: I0216 13:09:39.046836 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:39 crc kubenswrapper[4816]: I0216 13:09:39.093269 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:39 crc kubenswrapper[4816]: I0216 13:09:39.933196 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:09:50 crc kubenswrapper[4816]: I0216 13:09:50.031260 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-fjbzt" Feb 16 13:09:50 crc kubenswrapper[4816]: I0216 13:09:50.084888 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qjdjp"] Feb 16 13:10:06 crc kubenswrapper[4816]: I0216 13:10:06.941392 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:10:06 crc kubenswrapper[4816]: I0216 13:10:06.942319 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.122433 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" podUID="39dc10dd-2280-470a-b50e-272b7d1b705f" containerName="registry" containerID="cri-o://63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014" gracePeriod=30 Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.491926 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.530532 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-bound-sa-token\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.530630 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-trusted-ca\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.530743 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-certificates\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.530919 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.531018 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-tls\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.531069 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/39dc10dd-2280-470a-b50e-272b7d1b705f-ca-trust-extracted\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.531105 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwtnj\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-kube-api-access-kwtnj\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.531153 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/39dc10dd-2280-470a-b50e-272b7d1b705f-installation-pull-secrets\") pod \"39dc10dd-2280-470a-b50e-272b7d1b705f\" (UID: \"39dc10dd-2280-470a-b50e-272b7d1b705f\") " Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.531741 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.532998 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.538473 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-kube-api-access-kwtnj" (OuterVolumeSpecName: "kube-api-access-kwtnj") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "kube-api-access-kwtnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.540035 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39dc10dd-2280-470a-b50e-272b7d1b705f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.540234 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.540504 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.547651 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39dc10dd-2280-470a-b50e-272b7d1b705f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.555919 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "39dc10dd-2280-470a-b50e-272b7d1b705f" (UID: "39dc10dd-2280-470a-b50e-272b7d1b705f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.632534 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwtnj\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-kube-api-access-kwtnj\") on node \"crc\" DevicePath \"\"" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.632582 4816 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/39dc10dd-2280-470a-b50e-272b7d1b705f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.632591 4816 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.632601 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.632612 4816 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.632620 4816 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/39dc10dd-2280-470a-b50e-272b7d1b705f-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:10:15 crc kubenswrapper[4816]: I0216 13:10:15.632640 4816 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/39dc10dd-2280-470a-b50e-272b7d1b705f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.133901 4816 generic.go:334] "Generic (PLEG): container finished" podID="39dc10dd-2280-470a-b50e-272b7d1b705f" containerID="63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014" exitCode=0 Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.133965 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" event={"ID":"39dc10dd-2280-470a-b50e-272b7d1b705f","Type":"ContainerDied","Data":"63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014"} Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.134175 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" event={"ID":"39dc10dd-2280-470a-b50e-272b7d1b705f","Type":"ContainerDied","Data":"c7b8d9068f89c94b61cda7cc6b2d6a3baa845df4305df34f975da9fdebd5ce77"} Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.134199 4816 scope.go:117] "RemoveContainer" containerID="63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014" Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.133995 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qjdjp" Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.167610 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qjdjp"] Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.170431 4816 scope.go:117] "RemoveContainer" containerID="63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014" Feb 16 13:10:16 crc kubenswrapper[4816]: E0216 13:10:16.171400 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014\": container with ID starting with 63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014 not found: ID does not exist" containerID="63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014" Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.171479 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014"} err="failed to get container status \"63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014\": rpc error: code = NotFound desc = could not find container \"63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014\": container with ID starting with 63bda4d0d0b7367fdf5409536021649bf621587481b25a7cda44d11484308014 not found: ID does not exist" Feb 16 13:10:16 crc kubenswrapper[4816]: I0216 13:10:16.173284 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qjdjp"] Feb 16 13:10:17 crc kubenswrapper[4816]: I0216 13:10:17.405481 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39dc10dd-2280-470a-b50e-272b7d1b705f" path="/var/lib/kubelet/pods/39dc10dd-2280-470a-b50e-272b7d1b705f/volumes" Feb 16 13:10:36 crc kubenswrapper[4816]: I0216 13:10:36.941290 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:10:36 crc kubenswrapper[4816]: I0216 13:10:36.941949 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:10:36 crc kubenswrapper[4816]: I0216 13:10:36.942006 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:10:36 crc kubenswrapper[4816]: I0216 13:10:36.942682 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a405cb39ec542f11b1ee9ee44ef19e54fb2c4eb861717d61b9255a4307d36e29"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:10:36 crc kubenswrapper[4816]: I0216 13:10:36.942749 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://a405cb39ec542f11b1ee9ee44ef19e54fb2c4eb861717d61b9255a4307d36e29" gracePeriod=600 Feb 16 13:10:37 crc kubenswrapper[4816]: I0216 13:10:37.261932 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="a405cb39ec542f11b1ee9ee44ef19e54fb2c4eb861717d61b9255a4307d36e29" exitCode=0 Feb 16 13:10:37 crc kubenswrapper[4816]: I0216 13:10:37.261994 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"a405cb39ec542f11b1ee9ee44ef19e54fb2c4eb861717d61b9255a4307d36e29"} Feb 16 13:10:37 crc kubenswrapper[4816]: I0216 13:10:37.262048 4816 scope.go:117] "RemoveContainer" containerID="6201a77a58d0befcd26d86a8d06fb40afcf6779ecfcbcb0e88cbca730167b352" Feb 16 13:10:38 crc kubenswrapper[4816]: I0216 13:10:38.269454 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"468b1f3fff8cff5cea98c4eb833e160c55f4f1d43b584a9679974b9249403d15"} Feb 16 13:13:06 crc kubenswrapper[4816]: I0216 13:13:06.940989 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:13:06 crc kubenswrapper[4816]: I0216 13:13:06.941609 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:13:36 crc kubenswrapper[4816]: I0216 13:13:36.941592 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:13:36 crc kubenswrapper[4816]: I0216 13:13:36.942197 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:14:06 crc kubenswrapper[4816]: I0216 13:14:06.940568 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:14:06 crc kubenswrapper[4816]: I0216 13:14:06.941208 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:14:06 
crc kubenswrapper[4816]: I0216 13:14:06.941263 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:14:06 crc kubenswrapper[4816]: I0216 13:14:06.941830 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"468b1f3fff8cff5cea98c4eb833e160c55f4f1d43b584a9679974b9249403d15"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:14:06 crc kubenswrapper[4816]: I0216 13:14:06.941881 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://468b1f3fff8cff5cea98c4eb833e160c55f4f1d43b584a9679974b9249403d15" gracePeriod=600 Feb 16 13:14:07 crc kubenswrapper[4816]: I0216 13:14:07.545288 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="468b1f3fff8cff5cea98c4eb833e160c55f4f1d43b584a9679974b9249403d15" exitCode=0 Feb 16 13:14:07 crc kubenswrapper[4816]: I0216 13:14:07.545361 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"468b1f3fff8cff5cea98c4eb833e160c55f4f1d43b584a9679974b9249403d15"} Feb 16 13:14:07 crc kubenswrapper[4816]: I0216 13:14:07.545868 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"7e0981cd562f683639a286c8a9849e9acb7985787e5b7fb344492cda47873ec7"} Feb 16 13:14:07 crc kubenswrapper[4816]: I0216 13:14:07.545914 4816 scope.go:117] "RemoveContainer" containerID="a405cb39ec542f11b1ee9ee44ef19e54fb2c4eb861717d61b9255a4307d36e29" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.181194 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz"] Feb 16 13:15:00 crc kubenswrapper[4816]: E0216 13:15:00.182052 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39dc10dd-2280-470a-b50e-272b7d1b705f" containerName="registry" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.182069 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="39dc10dd-2280-470a-b50e-272b7d1b705f" containerName="registry" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.182188 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="39dc10dd-2280-470a-b50e-272b7d1b705f" containerName="registry" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.182647 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.185704 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.186689 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.196278 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz"] Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.255519 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqbqm\" (UniqueName: \"kubernetes.io/projected/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-kube-api-access-zqbqm\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.255625 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-config-volume\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.255681 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-secret-volume\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.357186 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqbqm\" (UniqueName: \"kubernetes.io/projected/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-kube-api-access-zqbqm\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.357241 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-config-volume\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.357263 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-secret-volume\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.358160 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-config-volume\") pod 
\"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.366458 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-secret-volume\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.375856 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqbqm\" (UniqueName: \"kubernetes.io/projected/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-kube-api-access-zqbqm\") pod \"collect-profiles-29520795-qb7gz\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.512757 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.709278 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz"] Feb 16 13:15:00 crc kubenswrapper[4816]: W0216 13:15:00.716099 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1870327_3f1d_4a1a_b8ac_b463db7dc7ee.slice/crio-74d367baa93c9b2ab3b8b808f11c7f7312bbf6724bebfef1326cc378618d382e WatchSource:0}: Error finding container 74d367baa93c9b2ab3b8b808f11c7f7312bbf6724bebfef1326cc378618d382e: Status 404 returned error can't find the container with id 74d367baa93c9b2ab3b8b808f11c7f7312bbf6724bebfef1326cc378618d382e Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.863850 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" event={"ID":"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee","Type":"ContainerStarted","Data":"15bb0fd0ce57f2d60f2e13245b8c0efdbd3c1116e6082b1cfe0317dc9a3db27a"} Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.864176 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" event={"ID":"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee","Type":"ContainerStarted","Data":"74d367baa93c9b2ab3b8b808f11c7f7312bbf6724bebfef1326cc378618d382e"} Feb 16 13:15:00 crc kubenswrapper[4816]: I0216 13:15:00.886078 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" podStartSLOduration=0.886060939 podStartE2EDuration="886.060939ms" podCreationTimestamp="2026-02-16 13:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:15:00.882139622 +0000 UTC m=+700.208853370" watchObservedRunningTime="2026-02-16 13:15:00.886060939 +0000 UTC m=+700.212774667" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.425886 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-8jlcx"] Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.426523 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.427849 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.429408 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.429932 4816 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-6zflt" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.432389 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.432712 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-8jlcx"] Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.475706 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkczw\" (UniqueName: \"kubernetes.io/projected/06079d91-cbb5-4049-9ec4-9a78778c3846-kube-api-access-xkczw\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.475781 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06079d91-cbb5-4049-9ec4-9a78778c3846-node-mnt\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.475819 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06079d91-cbb5-4049-9ec4-9a78778c3846-crc-storage\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.576401 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06079d91-cbb5-4049-9ec4-9a78778c3846-node-mnt\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.576458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06079d91-cbb5-4049-9ec4-9a78778c3846-crc-storage\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.576520 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkczw\" (UniqueName: \"kubernetes.io/projected/06079d91-cbb5-4049-9ec4-9a78778c3846-kube-api-access-xkczw\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.576795 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06079d91-cbb5-4049-9ec4-9a78778c3846-node-mnt\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " 
pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.577313 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06079d91-cbb5-4049-9ec4-9a78778c3846-crc-storage\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.593318 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkczw\" (UniqueName: \"kubernetes.io/projected/06079d91-cbb5-4049-9ec4-9a78778c3846-kube-api-access-xkczw\") pod \"crc-storage-crc-8jlcx\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.741836 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.872699 4816 generic.go:334] "Generic (PLEG): container finished" podID="c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" containerID="15bb0fd0ce57f2d60f2e13245b8c0efdbd3c1116e6082b1cfe0317dc9a3db27a" exitCode=0 Feb 16 13:15:01 crc kubenswrapper[4816]: I0216 13:15:01.872745 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" event={"ID":"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee","Type":"ContainerDied","Data":"15bb0fd0ce57f2d60f2e13245b8c0efdbd3c1116e6082b1cfe0317dc9a3db27a"} Feb 16 13:15:02 crc kubenswrapper[4816]: I0216 13:15:02.191362 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-8jlcx"] Feb 16 13:15:02 crc kubenswrapper[4816]: I0216 13:15:02.200163 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 13:15:02 crc kubenswrapper[4816]: I0216 13:15:02.888476 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8jlcx" event={"ID":"06079d91-cbb5-4049-9ec4-9a78778c3846","Type":"ContainerStarted","Data":"d6c03e7f46f1c7b3b3b5f0889b4fef1cadfd6d04131fcd1b08daf760cb4f34a7"} Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.127800 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.197918 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-config-volume\") pod \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.198036 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqbqm\" (UniqueName: \"kubernetes.io/projected/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-kube-api-access-zqbqm\") pod \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.198090 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-secret-volume\") pod \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\" (UID: \"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee\") " Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.199965 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-config-volume" (OuterVolumeSpecName: "config-volume") pod "c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" (UID: "c1870327-3f1d-4a1a-b8ac-b463db7dc7ee"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.201159 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.216828 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" (UID: "c1870327-3f1d-4a1a-b8ac-b463db7dc7ee"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.216870 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-kube-api-access-zqbqm" (OuterVolumeSpecName: "kube-api-access-zqbqm") pod "c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" (UID: "c1870327-3f1d-4a1a-b8ac-b463db7dc7ee"). InnerVolumeSpecName "kube-api-access-zqbqm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.302294 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqbqm\" (UniqueName: \"kubernetes.io/projected/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-kube-api-access-zqbqm\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.302323 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.893611 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" event={"ID":"c1870327-3f1d-4a1a-b8ac-b463db7dc7ee","Type":"ContainerDied","Data":"74d367baa93c9b2ab3b8b808f11c7f7312bbf6724bebfef1326cc378618d382e"} Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.894012 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74d367baa93c9b2ab3b8b808f11c7f7312bbf6724bebfef1326cc378618d382e" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.894079 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz" Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.896427 4816 generic.go:334] "Generic (PLEG): container finished" podID="06079d91-cbb5-4049-9ec4-9a78778c3846" containerID="1b36b192d86353df7f3dd236e08170d9806e2046639e3b918c41edf8f3b3be91" exitCode=0 Feb 16 13:15:03 crc kubenswrapper[4816]: I0216 13:15:03.896462 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8jlcx" event={"ID":"06079d91-cbb5-4049-9ec4-9a78778c3846","Type":"ContainerDied","Data":"1b36b192d86353df7f3dd236e08170d9806e2046639e3b918c41edf8f3b3be91"} Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.125858 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.228905 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06079d91-cbb5-4049-9ec4-9a78778c3846-crc-storage\") pod \"06079d91-cbb5-4049-9ec4-9a78778c3846\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.229023 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06079d91-cbb5-4049-9ec4-9a78778c3846-node-mnt\") pod \"06079d91-cbb5-4049-9ec4-9a78778c3846\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.229046 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkczw\" (UniqueName: \"kubernetes.io/projected/06079d91-cbb5-4049-9ec4-9a78778c3846-kube-api-access-xkczw\") pod \"06079d91-cbb5-4049-9ec4-9a78778c3846\" (UID: \"06079d91-cbb5-4049-9ec4-9a78778c3846\") " Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.229170 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06079d91-cbb5-4049-9ec4-9a78778c3846-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "06079d91-cbb5-4049-9ec4-9a78778c3846" (UID: "06079d91-cbb5-4049-9ec4-9a78778c3846"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.235869 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06079d91-cbb5-4049-9ec4-9a78778c3846-kube-api-access-xkczw" (OuterVolumeSpecName: "kube-api-access-xkczw") pod "06079d91-cbb5-4049-9ec4-9a78778c3846" (UID: "06079d91-cbb5-4049-9ec4-9a78778c3846"). InnerVolumeSpecName "kube-api-access-xkczw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.249001 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06079d91-cbb5-4049-9ec4-9a78778c3846-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "06079d91-cbb5-4049-9ec4-9a78778c3846" (UID: "06079d91-cbb5-4049-9ec4-9a78778c3846"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.330440 4816 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06079d91-cbb5-4049-9ec4-9a78778c3846-node-mnt\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.330494 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkczw\" (UniqueName: \"kubernetes.io/projected/06079d91-cbb5-4049-9ec4-9a78778c3846-kube-api-access-xkczw\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.330514 4816 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06079d91-cbb5-4049-9ec4-9a78778c3846-crc-storage\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.910679 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8jlcx" event={"ID":"06079d91-cbb5-4049-9ec4-9a78778c3846","Type":"ContainerDied","Data":"d6c03e7f46f1c7b3b3b5f0889b4fef1cadfd6d04131fcd1b08daf760cb4f34a7"} Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.910724 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6c03e7f46f1c7b3b3b5f0889b4fef1cadfd6d04131fcd1b08daf760cb4f34a7" Feb 16 13:15:05 crc kubenswrapper[4816]: I0216 13:15:05.910750 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-8jlcx" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.870449 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"] Feb 16 13:15:12 crc kubenswrapper[4816]: E0216 13:15:12.872648 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06079d91-cbb5-4049-9ec4-9a78778c3846" containerName="storage" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.872765 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="06079d91-cbb5-4049-9ec4-9a78778c3846" containerName="storage" Feb 16 13:15:12 crc kubenswrapper[4816]: E0216 13:15:12.872853 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" containerName="collect-profiles" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.872952 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" containerName="collect-profiles" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.873213 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="06079d91-cbb5-4049-9ec4-9a78778c3846" containerName="storage" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.873320 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" containerName="collect-profiles" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.874647 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.878130 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.881885 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"] Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.928488 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-util\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.928559 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-bundle\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:12 crc kubenswrapper[4816]: I0216 13:15:12.928825 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk7sf\" (UniqueName: \"kubernetes.io/projected/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-kube-api-access-xk7sf\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc 
kubenswrapper[4816]: I0216 13:15:13.030778 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-util\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.030869 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-bundle\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.030926 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk7sf\" (UniqueName: \"kubernetes.io/projected/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-kube-api-access-xk7sf\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.032169 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-util\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.032432 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-bundle\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.057735 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk7sf\" (UniqueName: \"kubernetes.io/projected/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-kube-api-access-xk7sf\") pod \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.207141 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.456635 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s2hth"] Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.457364 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-controller" containerID="cri-o://2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.457809 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="northd" containerID="cri-o://858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.457881 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="nbdb" containerID="cri-o://4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.457905 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="sbdb" containerID="cri-o://060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.457976 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-node" containerID="cri-o://40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.457940 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.457930 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-acl-logging" containerID="cri-o://aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.487073 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"] Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.488966 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" containerID="cri-o://9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21" gracePeriod=30 Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.533754 4816 log.go:32] "RunPodSandbox from runtime service failed" 
err="rpc error: code = Unknown desc = failed to get network status for pod sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(e2f373407b930fbca2fc31487fc95a9a55a6464ac4ff523c6935c538d06f9d8f): CNI network \"\" not found" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.533840 4816 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to get network status for pod sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(e2f373407b930fbca2fc31487fc95a9a55a6464ac4ff523c6935c538d06f9d8f): CNI network \"\" not found" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.533865 4816 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to get network status for pod sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(e2f373407b930fbca2fc31487fc95a9a55a6464ac4ff523c6935c538d06f9d8f): CNI network \"\" not found" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.533911 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\\\": rpc error: code = Unknown desc = failed to get network status for pod sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(e2f373407b930fbca2fc31487fc95a9a55a6464ac4ff523c6935c538d06f9d8f): CNI network \\\"\\\" not found\"" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.751470 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/3.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.754469 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovn-acl-logging/0.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.754949 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovn-controller/0.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.755370 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.810810 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lwk7n"] Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.810989 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811001 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811010 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811015 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811023 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-ovn-metrics" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811029 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-ovn-metrics" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811036 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="northd" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811041 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="northd" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811049 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-acl-logging" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811055 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-acl-logging" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811066 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-node" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811072 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-node" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811079 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811085 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811091 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811096 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811105 4816 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kubecfg-setup" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811127 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kubecfg-setup" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811139 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="sbdb" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811145 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="sbdb" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811156 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811162 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811169 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="nbdb" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811174 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="nbdb" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811261 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811269 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811277 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-node" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811286 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="northd" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811293 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811299 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-acl-logging" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811310 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovn-controller" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811318 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="nbdb" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811326 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="sbdb" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811333 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="kube-rbac-proxy-ovn-metrics" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.811423 4816 
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811430 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811505 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.811688 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerName="ovnkube-controller"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.812928 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843064 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-slash\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843106 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-node-log\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843126 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843151 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-script-lib\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843172 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovn-node-metrics-cert\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843196 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-ovn\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843211 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-netd\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843209 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-slash" (OuterVolumeSpecName: "host-slash") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843239 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-ovn-kubernetes\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843272 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-node-log" (OuterVolumeSpecName: "node-log") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843287 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-openvswitch\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") "
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843329 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843358 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843377 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843392 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843390 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-log-socket\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843418 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-etc-openvswitch\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843422 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-log-socket" (OuterVolumeSpecName: "log-socket") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843439 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-var-lib-openvswitch\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843453 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843457 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-systemd\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843489 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-systemd-units\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843514 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-kubelet\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843553 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dc5ls\" (UniqueName: \"kubernetes.io/projected/0d1c53ef-b268-431b-bdb8-49f45d0715f8-kube-api-access-dc5ls\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843583 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-bin\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843607 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-config\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843626 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-netns\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843694 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-env-overrides\") pod \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\" (UID: \"0d1c53ef-b268-431b-bdb8-49f45d0715f8\") " Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843707 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843808 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843874 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-cni-netd\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843913 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-slash\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.843949 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-run-ovn-kubernetes\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844012 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-ovn\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844059 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-etc-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844084 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844115 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-ovnkube-script-lib\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844152 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"node-log\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-node-log\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844180 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-env-overrides\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844199 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844234 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-systemd-units\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844277 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f95af4f1-68cb-443c-969f-695e48d0b81c-ovn-node-metrics-cert\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844327 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-var-lib-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844357 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-log-socket\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844385 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-cni-bin\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844432 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-kubelet\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844462 4816 
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844488 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844521 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-run-netns\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844547 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc97m\" (UniqueName: \"kubernetes.io/projected/f95af4f1-68cb-443c-969f-695e48d0b81c-kube-api-access-rc97m\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844584 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-systemd\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n"
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844604 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844685 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844742 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844756 4816 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-ovn\") on node \"crc\" DevicePath \"\""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844771 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844780 4816 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-netd\") on node \"crc\" DevicePath \"\""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844801 4816 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844817 4816 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-openvswitch\") on node \"crc\" DevicePath \"\""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844832 4816 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-log-socket\") on node \"crc\" DevicePath \"\""
Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844831 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844850 4816 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844867 4816 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844883 4816 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844899 4816 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-slash\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844910 4816 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-node-log\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.844924 4816 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.845188 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.848285 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d1c53ef-b268-431b-bdb8-49f45d0715f8-kube-api-access-dc5ls" (OuterVolumeSpecName: "kube-api-access-dc5ls") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "kube-api-access-dc5ls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.848437 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.858147 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "0d1c53ef-b268-431b-bdb8-49f45d0715f8" (UID: "0d1c53ef-b268-431b-bdb8-49f45d0715f8"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946185 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-ovn\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946251 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-etc-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946275 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946301 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-ovnkube-script-lib\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946325 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-node-log\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946343 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-env-overrides\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946372 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-systemd-units\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946400 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f95af4f1-68cb-443c-969f-695e48d0b81c-ovn-node-metrics-cert\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946428 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-log-socket\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 
13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946472 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-log-socket\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946492 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-ovn\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946509 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-node-log\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946548 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946590 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-systemd-units\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946572 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-etc-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946880 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-var-lib-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.946993 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-kubelet\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947039 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-cni-bin\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947056 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-var-lib-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947092 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-ovnkube-config\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947168 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-run-netns\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947201 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947235 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc97m\" (UniqueName: \"kubernetes.io/projected/f95af4f1-68cb-443c-969f-695e48d0b81c-kube-api-access-rc97m\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947271 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-kubelet\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947284 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-systemd\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947312 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-openvswitch\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947342 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-run-netns\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947362 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-cni-netd\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947239 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-env-overrides\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947440 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-slash\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947462 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-ovnkube-script-lib\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947491 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-run-ovn-kubernetes\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947506 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-cni-netd\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947528 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-run-systemd\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947550 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-slash\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947373 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-cni-bin\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947593 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f95af4f1-68cb-443c-969f-695e48d0b81c-host-run-ovn-kubernetes\") pod \"ovnkube-node-lwk7n\" (UID: 
\"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947771 4816 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947796 4816 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947815 4816 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947838 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dc5ls\" (UniqueName: \"kubernetes.io/projected/0d1c53ef-b268-431b-bdb8-49f45d0715f8-kube-api-access-dc5ls\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947856 4816 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947873 4816 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d1c53ef-b268-431b-bdb8-49f45d0715f8-host-run-netns\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947891 4816 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947914 4816 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.947936 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d1c53ef-b268-431b-bdb8-49f45d0715f8-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.948523 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f95af4f1-68cb-443c-969f-695e48d0b81c-ovnkube-config\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.950539 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f95af4f1-68cb-443c-969f-695e48d0b81c-ovn-node-metrics-cert\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.958858 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/2.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 
13:15:13.959469 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/1.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.959522 4816 generic.go:334] "Generic (PLEG): container finished" podID="2a58f937-7095-4c3c-b401-3a68ae936b86" containerID="d5039fad603c0433bc27bccdf795af00d118b8c3d4eb02751a4cd317d59167a6" exitCode=2 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.959588 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerDied","Data":"d5039fad603c0433bc27bccdf795af00d118b8c3d4eb02751a4cd317d59167a6"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.959627 4816 scope.go:117] "RemoveContainer" containerID="a0a481a2096b4947391310eac15fa78d03f10a012c578a88552c155ca412d0dd" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.960335 4816 scope.go:117] "RemoveContainer" containerID="d5039fad603c0433bc27bccdf795af00d118b8c3d4eb02751a4cd317d59167a6" Feb 16 13:15:13 crc kubenswrapper[4816]: E0216 13:15:13.960699 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-69xcw_openshift-multus(2a58f937-7095-4c3c-b401-3a68ae936b86)\"" pod="openshift-multus/multus-69xcw" podUID="2a58f937-7095-4c3c-b401-3a68ae936b86" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.965173 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovnkube-controller/3.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.965649 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc97m\" (UniqueName: \"kubernetes.io/projected/f95af4f1-68cb-443c-969f-695e48d0b81c-kube-api-access-rc97m\") pod \"ovnkube-node-lwk7n\" (UID: \"f95af4f1-68cb-443c-969f-695e48d0b81c\") " pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.968624 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovn-acl-logging/0.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.969250 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-s2hth_0d1c53ef-b268-431b-bdb8-49f45d0715f8/ovn-controller/0.log" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971002 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21" exitCode=0 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971043 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1" exitCode=0 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971062 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603" exitCode=0 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971075 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" 
containerID="858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6" exitCode=0 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971088 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9" exitCode=0 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971105 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8" exitCode=0 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971116 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733" exitCode=143 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971131 4816 generic.go:334] "Generic (PLEG): container finished" podID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" containerID="2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89" exitCode=143 Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971133 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971207 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971222 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971249 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971269 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971298 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971319 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971203 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971338 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971478 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971497 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971511 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971520 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971528 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971536 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971543 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971551 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971558 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971580 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971604 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971612 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971620 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971627 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971635 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971643 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971650 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971686 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971694 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971701 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971712 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971724 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971737 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971745 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971752 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971756 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971759 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971869 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971879 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971888 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971898 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971906 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971918 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s2hth" event={"ID":"0d1c53ef-b268-431b-bdb8-49f45d0715f8","Type":"ContainerDied","Data":"59c3e964c4f8728941bb03bec2bacdf211e38bd0ac1809ddabfe840c9e6b5a55"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971932 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971942 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971952 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971961 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971971 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971981 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.971995 4816 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.972006 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.972016 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"} Feb 16 13:15:13 crc kubenswrapper[4816]: I0216 13:15:13.972025 4816 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.000674 4816 scope.go:117] "RemoveContainer" containerID="9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.016151 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s2hth"] Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.017872 4816 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(bce772c7d7ec15d6de2d5f4570a958a1b4ec980c5827b0fbbf5dafb03b650f21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.017939 4816 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(bce772c7d7ec15d6de2d5f4570a958a1b4ec980c5827b0fbbf5dafb03b650f21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.017965 4816 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(bce772c7d7ec15d6de2d5f4570a958a1b4ec980c5827b0fbbf5dafb03b650f21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.018024 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(bce772c7d7ec15d6de2d5f4570a958a1b4ec980c5827b0fbbf5dafb03b650f21): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.020382 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s2hth"] Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.027605 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.045093 4816 scope.go:117] "RemoveContainer" containerID="060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.057386 4816 scope.go:117] "RemoveContainer" containerID="4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.097429 4816 scope.go:117] "RemoveContainer" containerID="858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.113150 4816 scope.go:117] "RemoveContainer" containerID="8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.125972 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.128230 4816 scope.go:117] "RemoveContainer" containerID="40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.147111 4816 scope.go:117] "RemoveContainer" containerID="aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.203164 4816 scope.go:117] "RemoveContainer" containerID="2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.220499 4816 scope.go:117] "RemoveContainer" containerID="c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.240511 4816 scope.go:117] "RemoveContainer" containerID="9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.240913 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21\": container with ID starting with 9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21 not found: ID does not exist" containerID="9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.240964 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} err="failed to get container status \"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21\": rpc error: code = NotFound desc = could not find container \"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21\": container with ID starting with 9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.240997 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.241301 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\": container with ID starting with a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525 not found: ID does not exist" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.241336 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"} err="failed to get container status \"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\": rpc error: code = NotFound desc = could not find container \"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\": container with ID starting with a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.241360 4816 scope.go:117] "RemoveContainer" containerID="060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.241670 4816 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\": container with ID starting with 060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1 not found: ID does not exist" containerID="060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.241702 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} err="failed to get container status \"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\": rpc error: code = NotFound desc = could not find container \"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\": container with ID starting with 060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.241718 4816 scope.go:117] "RemoveContainer" containerID="4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.241953 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\": container with ID starting with 4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603 not found: ID does not exist" containerID="4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.241992 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"} err="failed to get container status \"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\": rpc error: code = NotFound desc = could not find container \"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\": container with ID starting with 4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.242027 4816 scope.go:117] "RemoveContainer" containerID="858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.242274 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\": container with ID starting with 858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6 not found: ID does not exist" containerID="858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.242305 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"} err="failed to get container status \"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\": rpc error: code = NotFound desc = could not find container \"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\": container with ID starting with 858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.242318 4816 scope.go:117] "RemoveContainer" 
containerID="8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.242550 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\": container with ID starting with 8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9 not found: ID does not exist" containerID="8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.242570 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"} err="failed to get container status \"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\": rpc error: code = NotFound desc = could not find container \"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\": container with ID starting with 8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.242581 4816 scope.go:117] "RemoveContainer" containerID="40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.242853 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\": container with ID starting with 40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8 not found: ID does not exist" containerID="40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.242887 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"} err="failed to get container status \"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\": rpc error: code = NotFound desc = could not find container \"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\": container with ID starting with 40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.242907 4816 scope.go:117] "RemoveContainer" containerID="aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.243095 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\": container with ID starting with aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733 not found: ID does not exist" containerID="aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.243130 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"} err="failed to get container status \"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\": rpc error: code = NotFound desc = could not find container \"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\": container with ID starting with 
aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.243150 4816 scope.go:117] "RemoveContainer" containerID="2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.243400 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\": container with ID starting with 2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89 not found: ID does not exist" containerID="2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.243425 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"} err="failed to get container status \"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\": rpc error: code = NotFound desc = could not find container \"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\": container with ID starting with 2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.243438 4816 scope.go:117] "RemoveContainer" containerID="c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98" Feb 16 13:15:14 crc kubenswrapper[4816]: E0216 13:15:14.243625 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\": container with ID starting with c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98 not found: ID does not exist" containerID="c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.243686 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} err="failed to get container status \"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\": rpc error: code = NotFound desc = could not find container \"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\": container with ID starting with c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.243709 4816 scope.go:117] "RemoveContainer" containerID="9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.243984 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} err="failed to get container status \"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21\": rpc error: code = NotFound desc = could not find container \"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21\": container with ID starting with 9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.244003 4816 scope.go:117] "RemoveContainer" containerID="a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525" Feb 16 13:15:14 crc 
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.244180 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525"} err="failed to get container status \"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\": rpc error: code = NotFound desc = could not find container \"a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525\": container with ID starting with a5f60604454fbc9d1ec6c056477a8d8cec4730b52adef2c68db0f08c9317a525 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.244196 4816 scope.go:117] "RemoveContainer" containerID="060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.244444 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1"} err="failed to get container status \"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\": rpc error: code = NotFound desc = could not find container \"060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1\": container with ID starting with 060f762b7795b450c209ebf503f237304615f6f63cf7c0309dd0848eb60e89b1 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.244467 4816 scope.go:117] "RemoveContainer" containerID="4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.244742 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603"} err="failed to get container status \"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\": rpc error: code = NotFound desc = could not find container \"4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603\": container with ID starting with 4166d731084b21fd6a5ec9431f296f77292d7ce6dab5c5ddf1950256d9e0d603 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.244768 4816 scope.go:117] "RemoveContainer" containerID="858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245079 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6"} err="failed to get container status \"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\": rpc error: code = NotFound desc = could not find container \"858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6\": container with ID starting with 858815bbde7131c8bb27d6991921b839416eeedd35945f9d78c194bcbb31f2f6 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245115 4816 scope.go:117] "RemoveContainer" containerID="8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245307 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9"} err="failed to get container status \"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\": rpc error: code = NotFound desc = could not find container \"8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9\": container with ID starting with 8165c1777f759ab8cc9a144b143bbccbfb77175b7472ee50d0b31b2e00a4fbc9 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245322 4816 scope.go:117] "RemoveContainer" containerID="40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245524 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8"} err="failed to get container status \"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\": rpc error: code = NotFound desc = could not find container \"40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8\": container with ID starting with 40cfe822e9d5d4c45868a87446cbc8faafa9a9cda1fe6655ac55a84ac706f6d8 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245551 4816 scope.go:117] "RemoveContainer" containerID="aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245827 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733"} err="failed to get container status \"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\": rpc error: code = NotFound desc = could not find container \"aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733\": container with ID starting with aa959f5b045a52695ec6a8f780ebe678f069719225b675645eae6029c636f733 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.245843 4816 scope.go:117] "RemoveContainer" containerID="2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.246078 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89"} err="failed to get container status \"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\": rpc error: code = NotFound desc = could not find container \"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\": container with ID starting with 2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.246109 4816 scope.go:117] "RemoveContainer" containerID="c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.246287 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} err="failed to get container status \"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\": rpc error: code = NotFound desc = could not find container \"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\": container with ID starting with c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98 not found: ID does not exist"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.246306 4816 scope.go:117] "RemoveContainer" containerID="9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"
Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.246529 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21"} err="failed to get container status \"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21\": rpc error: code = NotFound desc = could not find container \"9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21\": container with ID starting with 9017184df84eff754a22511ab19c66bc5126f6f58f95d9f42bb0e10923f17f21 not found: ID does not exist"
container \"2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89\": container with ID starting with 2b6140aa27bae77e64a6501be1d26a68c041212dec1d838032a42d3d13eb1a89 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.252829 4816 scope.go:117] "RemoveContainer" containerID="c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.253160 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98"} err="failed to get container status \"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\": rpc error: code = NotFound desc = could not find container \"c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98\": container with ID starting with c108e737dbb54b30483aae6f9df49b0db9de3567b5e77b57060041c2c6c42c98 not found: ID does not exist" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.978810 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/2.log" Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.980921 4816 generic.go:334] "Generic (PLEG): container finished" podID="f95af4f1-68cb-443c-969f-695e48d0b81c" containerID="fbc00b09497b90600fa8b7a12dc87e7241ba28e0b7551d224e1d745e7cc60cde" exitCode=0 Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.981001 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerDied","Data":"fbc00b09497b90600fa8b7a12dc87e7241ba28e0b7551d224e1d745e7cc60cde"} Feb 16 13:15:14 crc kubenswrapper[4816]: I0216 13:15:14.981035 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"fa238ec62dad00bab94e05ecfa01e75a3d709d6a7b271aec312be3ba875523a3"} Feb 16 13:15:15 crc kubenswrapper[4816]: I0216 13:15:15.407076 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d1c53ef-b268-431b-bdb8-49f45d0715f8" path="/var/lib/kubelet/pods/0d1c53ef-b268-431b-bdb8-49f45d0715f8/volumes" Feb 16 13:15:15 crc kubenswrapper[4816]: I0216 13:15:15.991102 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"2d1aab05eab3553aca8187970ce6f4b59d08e5a0597c2490d0acf78d905196a6"} Feb 16 13:15:15 crc kubenswrapper[4816]: I0216 13:15:15.991405 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"16872fdc2ceb8e85e2102b681c25c68b4e578006626e4f26a609df51c508f545"} Feb 16 13:15:15 crc kubenswrapper[4816]: I0216 13:15:15.991419 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"d731b5fa5701d4786b60fe44980e12a9d17fd55914d599c137d1f02f87a2b281"} Feb 16 13:15:15 crc kubenswrapper[4816]: I0216 13:15:15.991430 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" 
event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"7eec7f7c9be344af7af23531c2f4d852f837015d7dec915033a00aa42ded8ac9"} Feb 16 13:15:15 crc kubenswrapper[4816]: I0216 13:15:15.991440 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"9b105b45a6b963b10d4db68635b7885a6ccc7c2fe1167d8de2f43eb5f53cedf6"} Feb 16 13:15:15 crc kubenswrapper[4816]: I0216 13:15:15.991449 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"bc671dd450d978b5d5e375689142e8e8bb54ef1e6aeb79694697234b5fa0657f"} Feb 16 13:15:18 crc kubenswrapper[4816]: I0216 13:15:18.006890 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"d89e4557ad4cffab774af1f8d0cf1160aad7bc1a4a6dc75d521ae2d26ff09aa8"} Feb 16 13:15:20 crc kubenswrapper[4816]: I0216 13:15:20.023141 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" event={"ID":"f95af4f1-68cb-443c-969f-695e48d0b81c","Type":"ContainerStarted","Data":"130589f6500b34d12160d33af1132267dc5b6da047abb85a67ce2d55bdd4b166"} Feb 16 13:15:20 crc kubenswrapper[4816]: I0216 13:15:20.023797 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:20 crc kubenswrapper[4816]: I0216 13:15:20.023813 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:20 crc kubenswrapper[4816]: I0216 13:15:20.062498 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:20 crc kubenswrapper[4816]: I0216 13:15:20.075964 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" podStartSLOduration=7.075849668 podStartE2EDuration="7.075849668s" podCreationTimestamp="2026-02-16 13:15:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:15:20.061511588 +0000 UTC m=+719.388225336" watchObservedRunningTime="2026-02-16 13:15:20.075849668 +0000 UTC m=+719.402563406" Feb 16 13:15:21 crc kubenswrapper[4816]: I0216 13:15:21.028582 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:21 crc kubenswrapper[4816]: I0216 13:15:21.054414 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n" Feb 16 13:15:21 crc kubenswrapper[4816]: I0216 13:15:21.674138 4816 scope.go:117] "RemoveContainer" containerID="92dbe3cf610dafb9114434dda500c5762160297a0bf428753decea5484796cd2" Feb 16 13:15:25 crc kubenswrapper[4816]: I0216 13:15:25.398071 4816 util.go:30] "No sandbox for pod can be found. 
Feb 16 13:15:25 crc kubenswrapper[4816]: I0216 13:15:25.398071 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:25 crc kubenswrapper[4816]: I0216 13:15:25.398859 4816 scope.go:117] "RemoveContainer" containerID="d5039fad603c0433bc27bccdf795af00d118b8c3d4eb02751a4cd317d59167a6"
Feb 16 13:15:25 crc kubenswrapper[4816]: I0216 13:15:25.398999 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:25 crc kubenswrapper[4816]: E0216 13:15:25.399332 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-69xcw_openshift-multus(2a58f937-7095-4c3c-b401-3a68ae936b86)\"" pod="openshift-multus/multus-69xcw" podUID="2a58f937-7095-4c3c-b401-3a68ae936b86"
Feb 16 13:15:25 crc kubenswrapper[4816]: E0216 13:15:25.433198 4816 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(35d60754445ed9845e43ea97e88080088b5e3efbf8c944979c8ea1d8eb205601): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 16 13:15:25 crc kubenswrapper[4816]: E0216 13:15:25.433643 4816 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(35d60754445ed9845e43ea97e88080088b5e3efbf8c944979c8ea1d8eb205601): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:25 crc kubenswrapper[4816]: E0216 13:15:25.433698 4816 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(35d60754445ed9845e43ea97e88080088b5e3efbf8c944979c8ea1d8eb205601): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:25 crc kubenswrapper[4816]: E0216 13:15:25.433772 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(35d60754445ed9845e43ea97e88080088b5e3efbf8c944979c8ea1d8eb205601): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef"
Feb 16 13:15:37 crc kubenswrapper[4816]: I0216 13:15:37.398379 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:37 crc kubenswrapper[4816]: I0216 13:15:37.399143 4816 scope.go:117] "RemoveContainer" containerID="d5039fad603c0433bc27bccdf795af00d118b8c3d4eb02751a4cd317d59167a6"
Feb 16 13:15:37 crc kubenswrapper[4816]: I0216 13:15:37.399475 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:37 crc kubenswrapper[4816]: E0216 13:15:37.447503 4816 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(a3ad8e748d3df2a7228714a3596969215ff3dac4c7037a9ceb431f9e49d2db4d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 16 13:15:37 crc kubenswrapper[4816]: E0216 13:15:37.447586 4816 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(a3ad8e748d3df2a7228714a3596969215ff3dac4c7037a9ceb431f9e49d2db4d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:37 crc kubenswrapper[4816]: E0216 13:15:37.447617 4816 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(a3ad8e748d3df2a7228714a3596969215ff3dac4c7037a9ceb431f9e49d2db4d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:37 crc kubenswrapper[4816]: E0216 13:15:37.447713 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace(ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_openshift-marketplace_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef_0(a3ad8e748d3df2a7228714a3596969215ff3dac4c7037a9ceb431f9e49d2db4d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef"
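Both sandbox attempts above (13:15:25 and 13:15:37) fail the same way: cri-o finds no network config under /etc/kubernetes/cni/net.d/ because the multus and OVN pods that install it are themselves still restarting, so the pod worker records CreatePodSandboxError and retries later, minting a fresh sandbox ID each attempt. A rough sketch of the kind of directory probe behind the "no CNI configuration file" message, assuming a libcni-style loader rather than cri-o's actual code:

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // hasCNIConfig reports whether dir contains at least one CNI network
    // configuration file (.conf, .conflist, or .json) -- roughly the check
    // that, when it comes up empty, yields "no CNI configuration file in ...".
    func hasCNIConfig(dir string) (bool, error) {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return false, err
    	}
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
    	fmt.Println(ok, err)
    }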
Feb 16 13:15:38 crc kubenswrapper[4816]: I0216 13:15:38.144284 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-69xcw_2a58f937-7095-4c3c-b401-3a68ae936b86/kube-multus/2.log"
Feb 16 13:15:38 crc kubenswrapper[4816]: I0216 13:15:38.144524 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-69xcw" event={"ID":"2a58f937-7095-4c3c-b401-3a68ae936b86","Type":"ContainerStarted","Data":"d88f628c129d5bf0ebbf7161eff0d9bf276f72db2a2c8064e3554f8a9bb12fde"}
Feb 16 13:15:44 crc kubenswrapper[4816]: I0216 13:15:44.157477 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lwk7n"
Feb 16 13:15:50 crc kubenswrapper[4816]: I0216 13:15:50.397982 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:50 crc kubenswrapper[4816]: I0216 13:15:50.399772 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp"
Feb 16 13:15:50 crc kubenswrapper[4816]: W0216 13:15:50.636143 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba88b0ce_2a7b_4a6c_9c45_9094f54f17ef.slice/crio-341d5c5f7c5be917cfd13c4164f2619d268245417fe5700ad5c53af6fb5cf511 WatchSource:0}: Error finding container 341d5c5f7c5be917cfd13c4164f2619d268245417fe5700ad5c53af6fb5cf511: Status 404 returned error can't find the container with id 341d5c5f7c5be917cfd13c4164f2619d268245417fe5700ad5c53af6fb5cf511
Feb 16 13:15:51 crc kubenswrapper[4816]: I0216 13:15:51.221915 4816 generic.go:334] "Generic (PLEG): container finished" podID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerID="7874e55ab2b64e2f707dc5a8058a8648f0b935934cbf45c82cdbb70924254f4b" exitCode=0
Feb 16 13:15:51 crc kubenswrapper[4816]: I0216 13:15:51.222064 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" event={"ID":"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef","Type":"ContainerDied","Data":"7874e55ab2b64e2f707dc5a8058a8648f0b935934cbf45c82cdbb70924254f4b"}
Feb 16 13:15:51 crc kubenswrapper[4816]: I0216 13:15:51.222249 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" event={"ID":"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef","Type":"ContainerStarted","Data":"341d5c5f7c5be917cfd13c4164f2619d268245417fe5700ad5c53af6fb5cf511"}
Feb 16 13:15:53 crc kubenswrapper[4816]: I0216 13:15:53.232871 4816 generic.go:334] "Generic (PLEG): container finished" podID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerID="bd8f2395b014b7bcd79bfb46104e987fb4df0b888539f37e6bab2131a56494a5" exitCode=0
Feb 16 13:15:53 crc kubenswrapper[4816]: I0216 13:15:53.232954 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" event={"ID":"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef","Type":"ContainerDied","Data":"bd8f2395b014b7bcd79bfb46104e987fb4df0b888539f37e6bab2131a56494a5"}
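The "Generic (PLEG): container finished" entries come from the pod lifecycle event generator diffing container states between relists: a container last seen running that now reports exited yields a ContainerDied event, which is how the marketplace bundle pod's unpack steps (7874e55a…, bd8f2395…, each exitCode=0) are observed in order. A toy relist diff with hypothetical types, not the PLEG's real ones:

    package main

    import "fmt"

    type state string

    const (
    	running state = "running"
    	exited  state = "exited"
    )

    // diffRelist emits a ContainerDied-style event for every container that
    // was running at the previous relist and has exited by the current one.
    func diffRelist(prev, curr map[string]state) []string {
    	var events []string
    	for id, s := range curr {
    		if s == exited && prev[id] == running {
    			events = append(events, "ContainerDied "+id)
    		}
    	}
    	return events
    }

    func main() {
    	prev := map[string]state{"7874e55a": running}
    	curr := map[string]state{"7874e55a": exited}
    	fmt.Println(diffRelist(prev, curr)) // [ContainerDied 7874e55a]
    }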
generic.go:334] "Generic (PLEG): container finished" podID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerID="61a4701d2cfe92435fc908ee48869da28b831856cb872465b46ae525435ebfa2" exitCode=0 Feb 16 13:15:54 crc kubenswrapper[4816]: I0216 13:15:54.243287 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" event={"ID":"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef","Type":"ContainerDied","Data":"61a4701d2cfe92435fc908ee48869da28b831856cb872465b46ae525435ebfa2"} Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.479195 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.610488 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk7sf\" (UniqueName: \"kubernetes.io/projected/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-kube-api-access-xk7sf\") pod \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.610707 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-bundle\") pod \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.610728 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-util\") pod \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\" (UID: \"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef\") " Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.611371 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-bundle" (OuterVolumeSpecName: "bundle") pod "ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" (UID: "ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.615296 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-kube-api-access-xk7sf" (OuterVolumeSpecName: "kube-api-access-xk7sf") pod "ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" (UID: "ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef"). InnerVolumeSpecName "kube-api-access-xk7sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.624218 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-util" (OuterVolumeSpecName: "util") pod "ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" (UID: "ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.711768 4816 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.711805 4816 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-util\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.711816 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk7sf\" (UniqueName: \"kubernetes.io/projected/ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef-kube-api-access-xk7sf\") on node \"crc\" DevicePath \"\"" Feb 16 13:15:55 crc kubenswrapper[4816]: I0216 13:15:55.943938 4816 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 16 13:15:56 crc kubenswrapper[4816]: I0216 13:15:56.256711 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" event={"ID":"ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef","Type":"ContainerDied","Data":"341d5c5f7c5be917cfd13c4164f2619d268245417fe5700ad5c53af6fb5cf511"} Feb 16 13:15:56 crc kubenswrapper[4816]: I0216 13:15:56.256747 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="341d5c5f7c5be917cfd13c4164f2619d268245417fe5700ad5c53af6fb5cf511" Feb 16 13:15:56 crc kubenswrapper[4816]: I0216 13:15:56.256773 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.704331 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-694c9596b7-dsvhf"] Feb 16 13:15:59 crc kubenswrapper[4816]: E0216 13:15:59.704849 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerName="pull" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.704862 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerName="pull" Feb 16 13:15:59 crc kubenswrapper[4816]: E0216 13:15:59.704878 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerName="util" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.704883 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerName="util" Feb 16 13:15:59 crc kubenswrapper[4816]: E0216 13:15:59.704894 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerName="extract" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.704906 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerName="extract" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.705013 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef" containerName="extract" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.705392 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.708396 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.708593 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-nwvj6" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.711721 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.720908 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-694c9596b7-dsvhf"] Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.760736 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwzj7\" (UniqueName: \"kubernetes.io/projected/1a2b9a5f-f606-4247-83e8-efaa7185c2fb-kube-api-access-zwzj7\") pod \"nmstate-operator-694c9596b7-dsvhf\" (UID: \"1a2b9a5f-f606-4247-83e8-efaa7185c2fb\") " pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.861280 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwzj7\" (UniqueName: \"kubernetes.io/projected/1a2b9a5f-f606-4247-83e8-efaa7185c2fb-kube-api-access-zwzj7\") pod \"nmstate-operator-694c9596b7-dsvhf\" (UID: \"1a2b9a5f-f606-4247-83e8-efaa7185c2fb\") " pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" Feb 16 13:15:59 crc kubenswrapper[4816]: I0216 13:15:59.881984 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwzj7\" (UniqueName: \"kubernetes.io/projected/1a2b9a5f-f606-4247-83e8-efaa7185c2fb-kube-api-access-zwzj7\") pod \"nmstate-operator-694c9596b7-dsvhf\" (UID: \"1a2b9a5f-f606-4247-83e8-efaa7185c2fb\") " pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" Feb 16 13:16:00 crc kubenswrapper[4816]: I0216 13:16:00.021580 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" Feb 16 13:16:00 crc kubenswrapper[4816]: I0216 13:16:00.237853 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-694c9596b7-dsvhf"] Feb 16 13:16:00 crc kubenswrapper[4816]: I0216 13:16:00.277434 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" event={"ID":"1a2b9a5f-f606-4247-83e8-efaa7185c2fb","Type":"ContainerStarted","Data":"087222faa116c1e0a25949126c96b2136c84f897d928e722896acef2595621a6"} Feb 16 13:16:02 crc kubenswrapper[4816]: I0216 13:16:02.291898 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" event={"ID":"1a2b9a5f-f606-4247-83e8-efaa7185c2fb","Type":"ContainerStarted","Data":"8e77702bbaa221ccaeb21780ef43766370eb547ab58f3560c63a46f5294f50ee"} Feb 16 13:16:02 crc kubenswrapper[4816]: I0216 13:16:02.310333 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-694c9596b7-dsvhf" podStartSLOduration=1.466076346 podStartE2EDuration="3.310320196s" podCreationTimestamp="2026-02-16 13:15:59 +0000 UTC" firstStartedPulling="2026-02-16 13:16:00.246788276 +0000 UTC m=+759.573502004" lastFinishedPulling="2026-02-16 13:16:02.091032136 +0000 UTC m=+761.417745854" observedRunningTime="2026-02-16 13:16:02.308159617 +0000 UTC m=+761.634873355" watchObservedRunningTime="2026-02-16 13:16:02.310320196 +0000 UTC m=+761.637033924" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.327463 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58c85c668d-294cz"] Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.328665 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.331952 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-lpgsm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.336426 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58c85c668d-294cz"] Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.340604 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr"] Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.341388 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.361258 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr"] Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.362198 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.364417 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-tc2tm"] Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.365224 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.501170 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-nmstate-lock\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.501226 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c962989d-280a-449a-aa65-bd2c95cf319f-tls-key-pair\") pod \"nmstate-webhook-866bcb46dc-cd8sr\" (UID: \"c962989d-280a-449a-aa65-bd2c95cf319f\") " pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.501268 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gxz2\" (UniqueName: \"kubernetes.io/projected/c962989d-280a-449a-aa65-bd2c95cf319f-kube-api-access-5gxz2\") pod \"nmstate-webhook-866bcb46dc-cd8sr\" (UID: \"c962989d-280a-449a-aa65-bd2c95cf319f\") " pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.501319 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmpvm\" (UniqueName: \"kubernetes.io/projected/acc14b47-c3db-4a65-9f0b-f50acf3d3cb0-kube-api-access-lmpvm\") pod \"nmstate-metrics-58c85c668d-294cz\" (UID: \"acc14b47-c3db-4a65-9f0b-f50acf3d3cb0\") " pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.501336 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-dbus-socket\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.501355 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q2nl\" (UniqueName: \"kubernetes.io/projected/76133951-996b-4da9-aea3-f2095b86b4c6-kube-api-access-9q2nl\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.501412 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-ovs-socket\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.503576 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"] Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.504266 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.507094 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.507243 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.507380 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-vpl5q" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.514968 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"] Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.602864 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-ovs-socket\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.602956 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-nmstate-lock\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.602977 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-ovs-socket\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.602989 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c962989d-280a-449a-aa65-bd2c95cf319f-tls-key-pair\") pod \"nmstate-webhook-866bcb46dc-cd8sr\" (UID: \"c962989d-280a-449a-aa65-bd2c95cf319f\") " pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.603048 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-nmstate-lock\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.603507 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gxz2\" (UniqueName: \"kubernetes.io/projected/c962989d-280a-449a-aa65-bd2c95cf319f-kube-api-access-5gxz2\") pod \"nmstate-webhook-866bcb46dc-cd8sr\" (UID: \"c962989d-280a-449a-aa65-bd2c95cf319f\") " pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.603547 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmpvm\" (UniqueName: \"kubernetes.io/projected/acc14b47-c3db-4a65-9f0b-f50acf3d3cb0-kube-api-access-lmpvm\") pod \"nmstate-metrics-58c85c668d-294cz\" (UID: \"acc14b47-c3db-4a65-9f0b-f50acf3d3cb0\") " pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" Feb 16 13:16:03 crc kubenswrapper[4816]: 
I0216 13:16:03.603573 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-dbus-socket\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.603612 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q2nl\" (UniqueName: \"kubernetes.io/projected/76133951-996b-4da9-aea3-f2095b86b4c6-kube-api-access-9q2nl\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.603956 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/76133951-996b-4da9-aea3-f2095b86b4c6-dbus-socket\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.611386 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c962989d-280a-449a-aa65-bd2c95cf319f-tls-key-pair\") pod \"nmstate-webhook-866bcb46dc-cd8sr\" (UID: \"c962989d-280a-449a-aa65-bd2c95cf319f\") " pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.621641 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q2nl\" (UniqueName: \"kubernetes.io/projected/76133951-996b-4da9-aea3-f2095b86b4c6-kube-api-access-9q2nl\") pod \"nmstate-handler-tc2tm\" (UID: \"76133951-996b-4da9-aea3-f2095b86b4c6\") " pod="openshift-nmstate/nmstate-handler-tc2tm" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.628264 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmpvm\" (UniqueName: \"kubernetes.io/projected/acc14b47-c3db-4a65-9f0b-f50acf3d3cb0-kube-api-access-lmpvm\") pod \"nmstate-metrics-58c85c668d-294cz\" (UID: \"acc14b47-c3db-4a65-9f0b-f50acf3d3cb0\") " pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.631675 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gxz2\" (UniqueName: \"kubernetes.io/projected/c962989d-280a-449a-aa65-bd2c95cf319f-kube-api-access-5gxz2\") pod \"nmstate-webhook-866bcb46dc-cd8sr\" (UID: \"c962989d-280a-449a-aa65-bd2c95cf319f\") " pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.654243 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.667531 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.684934 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-tc2tm"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.705162 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea4dc88c-2154-46af-937c-341a1afd226e-plugin-serving-cert\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.705715 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ea4dc88c-2154-46af-937c-341a1afd226e-nginx-conf\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.705842 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhlvg\" (UniqueName: \"kubernetes.io/projected/ea4dc88c-2154-46af-937c-341a1afd226e-kube-api-access-bhlvg\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.723886 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-55ff78d854-jsgvb"]
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.724734 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.747912 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-55ff78d854-jsgvb"]
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806372 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-oauth-serving-cert\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806420 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-config\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806449 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ea4dc88c-2154-46af-937c-341a1afd226e-nginx-conf\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806466 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dh5g\" (UniqueName: \"kubernetes.io/projected/e5745028-bd7c-4c96-82db-386bb5a7c5d4-kube-api-access-4dh5g\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806483 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-service-ca\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806507 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhlvg\" (UniqueName: \"kubernetes.io/projected/ea4dc88c-2154-46af-937c-341a1afd226e-kube-api-access-bhlvg\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806524 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-trusted-ca-bundle\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806542 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-serving-cert\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806572 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-oauth-config\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.806588 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea4dc88c-2154-46af-937c-341a1afd226e-plugin-serving-cert\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.807849 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ea4dc88c-2154-46af-937c-341a1afd226e-nginx-conf\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.811506 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ea4dc88c-2154-46af-937c-341a1afd226e-plugin-serving-cert\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.825383 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhlvg\" (UniqueName: \"kubernetes.io/projected/ea4dc88c-2154-46af-937c-341a1afd226e-kube-api-access-bhlvg\") pod \"nmstate-console-plugin-5c78fc5d65-sl8qx\" (UID: \"ea4dc88c-2154-46af-937c-341a1afd226e\") " pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.907848 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-trusted-ca-bundle\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.907894 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-serving-cert\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.907933 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-oauth-config\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.907967 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-oauth-serving-cert\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.907994 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-config\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.908019 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dh5g\" (UniqueName: \"kubernetes.io/projected/e5745028-bd7c-4c96-82db-386bb5a7c5d4-kube-api-access-4dh5g\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.908034 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-service-ca\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.909708 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-config\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.910413 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-trusted-ca-bundle\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.910428 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-service-ca\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.910542 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e5745028-bd7c-4c96-82db-386bb5a7c5d4-oauth-serving-cert\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.913158 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-serving-cert\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.913292 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e5745028-bd7c-4c96-82db-386bb5a7c5d4-console-oauth-config\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.927809 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dh5g\" (UniqueName: \"kubernetes.io/projected/e5745028-bd7c-4c96-82db-386bb5a7c5d4-kube-api-access-4dh5g\") pod \"console-55ff78d854-jsgvb\" (UID: \"e5745028-bd7c-4c96-82db-386bb5a7c5d4\") " pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:03 crc kubenswrapper[4816]: I0216 13:16:03.964241 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58c85c668d-294cz"]
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.046154 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.120095 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.218460 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-55ff78d854-jsgvb"]
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.226230 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr"]
Feb 16 13:16:04 crc kubenswrapper[4816]: W0216 13:16:04.228553 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5745028_bd7c_4c96_82db_386bb5a7c5d4.slice/crio-a38303bddb404148a5679d6a3a137ac5297372acba9730dd6365c021d8f985ed WatchSource:0}: Error finding container a38303bddb404148a5679d6a3a137ac5297372acba9730dd6365c021d8f985ed: Status 404 returned error can't find the container with id a38303bddb404148a5679d6a3a137ac5297372acba9730dd6365c021d8f985ed
Feb 16 13:16:04 crc kubenswrapper[4816]: W0216 13:16:04.231770 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc962989d_280a_449a_aa65_bd2c95cf319f.slice/crio-b6ab316d6264cafcd1eb8f666b04db6b422302517c34547a458cdc6dfb9f9926 WatchSource:0}: Error finding container b6ab316d6264cafcd1eb8f666b04db6b422302517c34547a458cdc6dfb9f9926: Status 404 returned error can't find the container with id b6ab316d6264cafcd1eb8f666b04db6b422302517c34547a458cdc6dfb9f9926
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.303840 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" event={"ID":"c962989d-280a-449a-aa65-bd2c95cf319f","Type":"ContainerStarted","Data":"b6ab316d6264cafcd1eb8f666b04db6b422302517c34547a458cdc6dfb9f9926"}
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.305369 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-55ff78d854-jsgvb" event={"ID":"e5745028-bd7c-4c96-82db-386bb5a7c5d4","Type":"ContainerStarted","Data":"a38303bddb404148a5679d6a3a137ac5297372acba9730dd6365c021d8f985ed"}
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.306424 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-tc2tm" event={"ID":"76133951-996b-4da9-aea3-f2095b86b4c6","Type":"ContainerStarted","Data":"2c8dcebd72078701e793288db905aa0a7c613dafb49b75427da09b29c168b96b"}
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.307399 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" event={"ID":"acc14b47-c3db-4a65-9f0b-f50acf3d3cb0","Type":"ContainerStarted","Data":"4532a0fd545fa465d98eab152824acf98cb478e5ed310f50337011ccfefbfe6f"}
Feb 16 13:16:04 crc kubenswrapper[4816]: I0216 13:16:04.325493 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx"]
Feb 16 13:16:04 crc kubenswrapper[4816]: W0216 13:16:04.337005 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea4dc88c_2154_46af_937c_341a1afd226e.slice/crio-f1ddbb9ec24a1868ea7542b6ba716c751d8c637e3cbba791c5c80fd284e2518b WatchSource:0}: Error finding container f1ddbb9ec24a1868ea7542b6ba716c751d8c637e3cbba791c5c80fd284e2518b: Status 404 returned error can't find the container with id f1ddbb9ec24a1868ea7542b6ba716c751d8c637e3cbba791c5c80fd284e2518b
Feb 16 13:16:05 crc kubenswrapper[4816]: I0216 13:16:05.314688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx" event={"ID":"ea4dc88c-2154-46af-937c-341a1afd226e","Type":"ContainerStarted","Data":"f1ddbb9ec24a1868ea7542b6ba716c751d8c637e3cbba791c5c80fd284e2518b"}
Feb 16 13:16:05 crc kubenswrapper[4816]: I0216 13:16:05.316356 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-55ff78d854-jsgvb" event={"ID":"e5745028-bd7c-4c96-82db-386bb5a7c5d4","Type":"ContainerStarted","Data":"8df91f5ac26580818d5773ea54c95b93341c42e4a640d4cc2b81d32173bcd27c"}
Feb 16 13:16:05 crc kubenswrapper[4816]: I0216 13:16:05.336915 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-55ff78d854-jsgvb" podStartSLOduration=2.336891558 podStartE2EDuration="2.336891558s" podCreationTimestamp="2026-02-16 13:16:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:16:05.335223614 +0000 UTC m=+764.661937362" watchObservedRunningTime="2026-02-16 13:16:05.336891558 +0000 UTC m=+764.663605296"
Feb 16 13:16:06 crc kubenswrapper[4816]: I0216 13:16:06.324133 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" event={"ID":"acc14b47-c3db-4a65-9f0b-f50acf3d3cb0","Type":"ContainerStarted","Data":"24e6e9fa77b3aabd4a72ea6beb5ecfd5e2d246a862607f6dc03d06437dcb9bb2"}
Feb 16 13:16:06 crc kubenswrapper[4816]: I0216 13:16:06.325594 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" event={"ID":"c962989d-280a-449a-aa65-bd2c95cf319f","Type":"ContainerStarted","Data":"3e14862c050f4cbe9e3c33454f0461a819e8890871d14007c051e47f2d7dbe26"}
Feb 16 13:16:06 crc kubenswrapper[4816]: I0216 13:16:06.343177 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr" podStartSLOduration=1.512657122 podStartE2EDuration="3.343160169s" podCreationTimestamp="2026-02-16 13:16:03 +0000 UTC" firstStartedPulling="2026-02-16 13:16:04.233909805 +0000 UTC m=+763.560623543" lastFinishedPulling="2026-02-16 13:16:06.064412852 +0000 UTC m=+765.391126590" observedRunningTime="2026-02-16 13:16:06.342582523 +0000 UTC m=+765.669296251" watchObservedRunningTime="2026-02-16 13:16:06.343160169 +0000 UTC m=+765.669873897"
Feb 16 13:16:07 crc kubenswrapper[4816]: I0216 13:16:07.333252 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-tc2tm" event={"ID":"76133951-996b-4da9-aea3-f2095b86b4c6","Type":"ContainerStarted","Data":"6f5c24fe0601ec2e2c4485968c2fd91c682cd7b7d89fd4aa5c5ccaa6a3e551a9"}
Feb 16 13:16:07 crc kubenswrapper[4816]: I0216 13:16:07.333598 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-tc2tm"
Feb 16 13:16:07 crc kubenswrapper[4816]: I0216 13:16:07.335265 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx" event={"ID":"ea4dc88c-2154-46af-937c-341a1afd226e","Type":"ContainerStarted","Data":"041ed48ae01fb55dbf530830282349dce6d85dd9c64149acd3ea19287339587c"}
Feb 16 13:16:07 crc kubenswrapper[4816]: I0216 13:16:07.336945 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr"
Feb 16 13:16:07 crc kubenswrapper[4816]: I0216 13:16:07.350380 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-tc2tm" podStartSLOduration=2.036632625 podStartE2EDuration="4.350362065s" podCreationTimestamp="2026-02-16 13:16:03 +0000 UTC" firstStartedPulling="2026-02-16 13:16:03.760444197 +0000 UTC m=+763.087157925" lastFinishedPulling="2026-02-16 13:16:06.074173627 +0000 UTC m=+765.400887365" observedRunningTime="2026-02-16 13:16:07.349728638 +0000 UTC m=+766.676442376" watchObservedRunningTime="2026-02-16 13:16:07.350362065 +0000 UTC m=+766.677075793"
Feb 16 13:16:07 crc kubenswrapper[4816]: I0216 13:16:07.366042 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-5c78fc5d65-sl8qx" podStartSLOduration=1.713856439 podStartE2EDuration="4.366021721s" podCreationTimestamp="2026-02-16 13:16:03 +0000 UTC" firstStartedPulling="2026-02-16 13:16:04.340069245 +0000 UTC m=+763.666782973" lastFinishedPulling="2026-02-16 13:16:06.992234527 +0000 UTC m=+766.318948255" observedRunningTime="2026-02-16 13:16:07.364474079 +0000 UTC m=+766.691187807" watchObservedRunningTime="2026-02-16 13:16:07.366021721 +0000 UTC m=+766.692735459"
Feb 16 13:16:08 crc kubenswrapper[4816]: I0216 13:16:08.344147 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" event={"ID":"acc14b47-c3db-4a65-9f0b-f50acf3d3cb0","Type":"ContainerStarted","Data":"e00f3b0a264cc2101e64b93ba12ef2df7753386c87d5da10b6ea1d5f855fcdec"}
Feb 16 13:16:08 crc kubenswrapper[4816]: I0216 13:16:08.372112 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58c85c668d-294cz" podStartSLOduration=1.183548426 podStartE2EDuration="5.372088086s" podCreationTimestamp="2026-02-16 13:16:03 +0000 UTC" firstStartedPulling="2026-02-16 13:16:03.971266697 +0000 UTC m=+763.297980425" lastFinishedPulling="2026-02-16 13:16:08.159806357 +0000 UTC m=+767.486520085" observedRunningTime="2026-02-16 13:16:08.36486997 +0000 UTC m=+767.691583738" watchObservedRunningTime="2026-02-16 13:16:08.372088086 +0000 UTC m=+767.698801834"
Feb 16 13:16:13 crc kubenswrapper[4816]: I0216 13:16:13.709876 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-tc2tm"
Feb 16 13:16:14 crc kubenswrapper[4816]: I0216 13:16:14.047260 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:14 crc kubenswrapper[4816]: I0216 13:16:14.047364 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:14 crc kubenswrapper[4816]: I0216 13:16:14.055131 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:14 crc kubenswrapper[4816]: I0216 13:16:14.398705 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-55ff78d854-jsgvb"
Feb 16 13:16:14 crc kubenswrapper[4816]: I0216 13:16:14.456874 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-f2dr7"]
Feb 16 13:16:23 crc kubenswrapper[4816]: I0216 13:16:23.678242 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-866bcb46dc-cd8sr"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.103762 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"]
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.105488 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.107354 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.115503 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-bundle\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.116471 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-util\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.116569 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"]
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.116612 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lcb2\" (UniqueName: \"kubernetes.io/projected/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-kube-api-access-6lcb2\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.217027 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-bundle\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.217105 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-util\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.217171 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lcb2\" (UniqueName: \"kubernetes.io/projected/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-kube-api-access-6lcb2\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.217882 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-util\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.217901 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-bundle\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.235030 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lcb2\" (UniqueName: \"kubernetes.io/projected/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-kube-api-access-6lcb2\") pod \"a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") " pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.422566 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:35 crc kubenswrapper[4816]: I0216 13:16:35.626761 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"]
Feb 16 13:16:36 crc kubenswrapper[4816]: I0216 13:16:36.519892 4816 generic.go:334] "Generic (PLEG): container finished" podID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerID="f18fb63d6e7e50f96d57e3bc5584d5b5e113343e3c9b49b5d0e7f9b5f18a933e" exitCode=0
Feb 16 13:16:36 crc kubenswrapper[4816]: I0216 13:16:36.519944 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9" event={"ID":"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1","Type":"ContainerDied","Data":"f18fb63d6e7e50f96d57e3bc5584d5b5e113343e3c9b49b5d0e7f9b5f18a933e"}
Feb 16 13:16:36 crc kubenswrapper[4816]: I0216 13:16:36.520212 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9" event={"ID":"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1","Type":"ContainerStarted","Data":"122bd92a24ecb3a4bb6856b010d8461a98d508c4f177cc692cb69f4cd278a1a3"}
Feb 16 13:16:36 crc kubenswrapper[4816]: I0216 13:16:36.941273 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 13:16:36 crc kubenswrapper[4816]: I0216 13:16:36.941344 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.476900 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8b9pn"]
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.478244 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.486628 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8b9pn"]
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.550281 4816 generic.go:334] "Generic (PLEG): container finished" podID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerID="118976d948987034df25b6c0f03a687e26c55ef55956ba87e5e879b8811b00d1" exitCode=0
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.550360 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9" event={"ID":"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1","Type":"ContainerDied","Data":"118976d948987034df25b6c0f03a687e26c55ef55956ba87e5e879b8811b00d1"}
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.563676 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gh6r\" (UniqueName: \"kubernetes.io/projected/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-kube-api-access-9gh6r\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.563917 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-utilities\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.564026 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-catalog-content\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.665111 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gh6r\" (UniqueName: \"kubernetes.io/projected/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-kube-api-access-9gh6r\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.665351 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-utilities\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.665451 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-catalog-content\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.665976 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-catalog-content\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.666006 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-utilities\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.686714 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gh6r\" (UniqueName: \"kubernetes.io/projected/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-kube-api-access-9gh6r\") pod \"redhat-operators-8b9pn\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:38 crc kubenswrapper[4816]: I0216 13:16:38.813033 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.067339 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8b9pn"]
Feb 16 13:16:39 crc kubenswrapper[4816]: W0216 13:16:39.076414 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d1ca3f5_f93c_4f2a_ae3c_e02274ee24b9.slice/crio-a4c47abe6161decd284754a68bb35df8119edf678fbf134bb5df34c66fd285a8 WatchSource:0}: Error finding container a4c47abe6161decd284754a68bb35df8119edf678fbf134bb5df34c66fd285a8: Status 404 returned error can't find the container with id a4c47abe6161decd284754a68bb35df8119edf678fbf134bb5df34c66fd285a8
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.499231 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-f2dr7" podUID="89f428fd-8717-4819-81d8-ee04443b38a5" containerName="console" containerID="cri-o://2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607" gracePeriod=15
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.557905 4816 generic.go:334] "Generic (PLEG): container finished" podID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerID="77c6c7730d2b552cf9c3f29d6ad87198904a218873665e8a7522070790a40c13" exitCode=0
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.557969 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9" event={"ID":"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1","Type":"ContainerDied","Data":"77c6c7730d2b552cf9c3f29d6ad87198904a218873665e8a7522070790a40c13"}
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.559618 4816 generic.go:334] "Generic (PLEG): container finished" podID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerID="193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13" exitCode=0
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.559666 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8b9pn" event={"ID":"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9","Type":"ContainerDied","Data":"193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13"}
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.559688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8b9pn" event={"ID":"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9","Type":"ContainerStarted","Data":"a4c47abe6161decd284754a68bb35df8119edf678fbf134bb5df34c66fd285a8"}
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.874273 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-f2dr7_89f428fd-8717-4819-81d8-ee04443b38a5/console/0.log"
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.874539 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-f2dr7"
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.884113 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-oauth-config\") pod \"89f428fd-8717-4819-81d8-ee04443b38a5\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") "
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.884202 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2ddt\" (UniqueName: \"kubernetes.io/projected/89f428fd-8717-4819-81d8-ee04443b38a5-kube-api-access-s2ddt\") pod \"89f428fd-8717-4819-81d8-ee04443b38a5\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") "
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.884233 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-oauth-serving-cert\") pod \"89f428fd-8717-4819-81d8-ee04443b38a5\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") "
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.884254 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-trusted-ca-bundle\") pod \"89f428fd-8717-4819-81d8-ee04443b38a5\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") "
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.884271 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-serving-cert\") pod \"89f428fd-8717-4819-81d8-ee04443b38a5\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") "
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.885026 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-service-ca\") pod \"89f428fd-8717-4819-81d8-ee04443b38a5\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") "
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.884981 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "89f428fd-8717-4819-81d8-ee04443b38a5" (UID: "89f428fd-8717-4819-81d8-ee04443b38a5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.885047 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "89f428fd-8717-4819-81d8-ee04443b38a5" (UID: "89f428fd-8717-4819-81d8-ee04443b38a5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.885118 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-console-config\") pod \"89f428fd-8717-4819-81d8-ee04443b38a5\" (UID: \"89f428fd-8717-4819-81d8-ee04443b38a5\") "
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.885442 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-service-ca" (OuterVolumeSpecName: "service-ca") pod "89f428fd-8717-4819-81d8-ee04443b38a5" (UID: "89f428fd-8717-4819-81d8-ee04443b38a5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.885615 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-console-config" (OuterVolumeSpecName: "console-config") pod "89f428fd-8717-4819-81d8-ee04443b38a5" (UID: "89f428fd-8717-4819-81d8-ee04443b38a5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.885974 4816 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-service-ca\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.886068 4816 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-console-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.886086 4816 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.886094 4816 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f428fd-8717-4819-81d8-ee04443b38a5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.888851 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "89f428fd-8717-4819-81d8-ee04443b38a5" (UID: "89f428fd-8717-4819-81d8-ee04443b38a5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.891188 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89f428fd-8717-4819-81d8-ee04443b38a5-kube-api-access-s2ddt" (OuterVolumeSpecName: "kube-api-access-s2ddt") pod "89f428fd-8717-4819-81d8-ee04443b38a5" (UID: "89f428fd-8717-4819-81d8-ee04443b38a5"). InnerVolumeSpecName "kube-api-access-s2ddt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.896076 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "89f428fd-8717-4819-81d8-ee04443b38a5" (UID: "89f428fd-8717-4819-81d8-ee04443b38a5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.987020 4816 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.987052 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2ddt\" (UniqueName: \"kubernetes.io/projected/89f428fd-8717-4819-81d8-ee04443b38a5-kube-api-access-s2ddt\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:39 crc kubenswrapper[4816]: I0216 13:16:39.987062 4816 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f428fd-8717-4819-81d8-ee04443b38a5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.705248 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-f2dr7_89f428fd-8717-4819-81d8-ee04443b38a5/console/0.log"
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.705525 4816 generic.go:334] "Generic (PLEG): container finished" podID="89f428fd-8717-4819-81d8-ee04443b38a5" containerID="2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607" exitCode=2
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.705592 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f2dr7" event={"ID":"89f428fd-8717-4819-81d8-ee04443b38a5","Type":"ContainerDied","Data":"2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607"}
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.705604 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-f2dr7"
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.705621 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f2dr7" event={"ID":"89f428fd-8717-4819-81d8-ee04443b38a5","Type":"ContainerDied","Data":"dd56cd12c0e97ab9fb375b75c9c53cb8c97a39a6ab22187cb826702982c5d998"}
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.705642 4816 scope.go:117] "RemoveContainer" containerID="2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607"
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.710222 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8b9pn" event={"ID":"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9","Type":"ContainerStarted","Data":"111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a"}
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.741116 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-f2dr7"]
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.746390 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-f2dr7"]
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.750402 4816 scope.go:117] "RemoveContainer" containerID="2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607"
Feb 16 13:16:40 crc kubenswrapper[4816]: E0216 13:16:40.751029 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607\": container with ID starting with 2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607 not found: ID does not exist" containerID="2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607"
Feb 16 13:16:40 crc kubenswrapper[4816]: I0216 13:16:40.751076 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607"} err="failed to get container status \"2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607\": rpc error: code = NotFound desc = could not find container \"2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607\": container with ID starting with 2183247a2bd44759a3282f3c8071b729325b6576f61fa870ec223437dbc3f607 not found: ID does not exist"
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.358911 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.403719 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lcb2\" (UniqueName: \"kubernetes.io/projected/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-kube-api-access-6lcb2\") pod \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") "
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.404427 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-util\") pod \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") "
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.404496 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-bundle\") pod \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\" (UID: \"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1\") "
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.405974 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-bundle" (OuterVolumeSpecName: "bundle") pod "1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" (UID: "1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.417395 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-util" (OuterVolumeSpecName: "util") pod "1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" (UID: "1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.418280 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89f428fd-8717-4819-81d8-ee04443b38a5" path="/var/lib/kubelet/pods/89f428fd-8717-4819-81d8-ee04443b38a5/volumes"
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.420933 4816 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-util\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.420957 4816 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.491999 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-kube-api-access-6lcb2" (OuterVolumeSpecName: "kube-api-access-6lcb2") pod "1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" (UID: "1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1"). InnerVolumeSpecName "kube-api-access-6lcb2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.522120 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lcb2\" (UniqueName: \"kubernetes.io/projected/1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1-kube-api-access-6lcb2\") on node \"crc\" DevicePath \"\""
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.724202 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9"
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.726769 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9" event={"ID":"1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1","Type":"ContainerDied","Data":"122bd92a24ecb3a4bb6856b010d8461a98d508c4f177cc692cb69f4cd278a1a3"}
Feb 16 13:16:41 crc kubenswrapper[4816]: I0216 13:16:41.726821 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="122bd92a24ecb3a4bb6856b010d8461a98d508c4f177cc692cb69f4cd278a1a3"
Feb 16 13:16:42 crc kubenswrapper[4816]: I0216 13:16:42.730596 4816 generic.go:334] "Generic (PLEG): container finished" podID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerID="111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a" exitCode=0
Feb 16 13:16:42 crc kubenswrapper[4816]: I0216 13:16:42.730691 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8b9pn" event={"ID":"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9","Type":"ContainerDied","Data":"111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a"}
Feb 16 13:16:43 crc kubenswrapper[4816]: I0216 13:16:43.738186 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8b9pn" event={"ID":"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9","Type":"ContainerStarted","Data":"612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9"}
Feb 16 13:16:43 crc kubenswrapper[4816]: I0216 13:16:43.757420 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8b9pn" podStartSLOduration=2.207201999 podStartE2EDuration="5.757400449s" podCreationTimestamp="2026-02-16 13:16:38 +0000 UTC" firstStartedPulling="2026-02-16 13:16:39.560864272 +0000 UTC m=+798.887578000" lastFinishedPulling="2026-02-16 13:16:43.111062722 +0000 UTC m=+802.437776450" observedRunningTime="2026-02-16 13:16:43.753952471 +0000 UTC m=+803.080666199" watchObservedRunningTime="2026-02-16 13:16:43.757400449 +0000 UTC m=+803.084114177"
Feb 16 13:16:48 crc kubenswrapper[4816]: I0216 13:16:48.814032 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:48 crc kubenswrapper[4816]: I0216 13:16:48.814647 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8b9pn"
Feb 16 13:16:49 crc kubenswrapper[4816]: I0216 13:16:49.863944 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8b9pn" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="registry-server" probeResult="failure" output=<
Feb 16 13:16:49 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s
Feb 16 13:16:49 crc kubenswrapper[4816]: >
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.518946 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"]
Feb 16 13:16:50 crc kubenswrapper[4816]: E0216 13:16:50.519145 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerName="extract"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.519156 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerName="extract"
Feb 16 13:16:50 crc kubenswrapper[4816]: E0216 13:16:50.519165 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerName="pull"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.519170 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerName="pull"
Feb 16 13:16:50 crc kubenswrapper[4816]: E0216 13:16:50.519186 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f428fd-8717-4819-81d8-ee04443b38a5" containerName="console"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.519194 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f428fd-8717-4819-81d8-ee04443b38a5" containerName="console"
Feb 16 13:16:50 crc kubenswrapper[4816]: E0216 13:16:50.519204 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerName="util"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.519210 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerName="util"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.519297 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f428fd-8717-4819-81d8-ee04443b38a5" containerName="console"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.519313 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1" containerName="extract"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.519689 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.521585 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.521797 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-4sm82"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.521891 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.521962 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.522379 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.583220 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"]
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.647330 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smjmf\" (UniqueName: \"kubernetes.io/projected/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-kube-api-access-smjmf\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.647415 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-webhook-cert\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.647484 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-apiservice-cert\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.748316 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-apiservice-cert\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.748409 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smjmf\" (UniqueName: \"kubernetes.io/projected/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-kube-api-access-smjmf\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.748456 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-webhook-cert\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.756977 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-apiservice-cert\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.766308 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-webhook-cert\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.769516 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smjmf\" (UniqueName: \"kubernetes.io/projected/b88b2649-f69f-4572-9db2-a66e4ac5ec2d-kube-api-access-smjmf\") pod \"metallb-operator-controller-manager-5c799b7d57-xrnz7\" (UID: \"b88b2649-f69f-4572-9db2-a66e4ac5ec2d\") " pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.781943 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"]
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.782612 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.784480 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.784488 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.784488 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-s5rnn"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.803209 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"]
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.837868 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.950324 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-webhook-cert\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.950388 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-apiservice-cert\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:50 crc kubenswrapper[4816]: I0216 13:16:50.950425 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn2vw\" (UniqueName: \"kubernetes.io/projected/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-kube-api-access-qn2vw\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.051520 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn2vw\" (UniqueName: \"kubernetes.io/projected/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-kube-api-access-qn2vw\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.051621 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-webhook-cert\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.051676 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-apiservice-cert\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.058388 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-webhook-cert\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.060837 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-apiservice-cert\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.087437 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn2vw\" (UniqueName: \"kubernetes.io/projected/1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1-kube-api-access-qn2vw\") pod \"metallb-operator-webhook-server-54976bdc59-m2djd\" (UID: \"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1\") " pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.118835 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.132367 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"]
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.412431 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd"]
Feb 16 13:16:51 crc kubenswrapper[4816]: W0216 13:16:51.426810 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ce8bd4d_5ddc_43bc_9a7c_bf99ca1f29c1.slice/crio-4fd1ff20a71b4644872f0d5168a6c904ca1ecd2022d1a2545516f678478f54ff WatchSource:0}: Error finding container 4fd1ff20a71b4644872f0d5168a6c904ca1ecd2022d1a2545516f678478f54ff: Status 404 returned error can't find the container with id 4fd1ff20a71b4644872f0d5168a6c904ca1ecd2022d1a2545516f678478f54ff
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.777247 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7" event={"ID":"b88b2649-f69f-4572-9db2-a66e4ac5ec2d","Type":"ContainerStarted","Data":"d8ae19b4b0d94081898c9d90ae3f51a5fcfe147947d65bcba5e4a2506f35508c"}
Feb 16 13:16:51 crc kubenswrapper[4816]: I0216 13:16:51.779494 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd" event={"ID":"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1","Type":"ContainerStarted","Data":"4fd1ff20a71b4644872f0d5168a6c904ca1ecd2022d1a2545516f678478f54ff"}
Feb 16 13:16:54 crc kubenswrapper[4816]: I0216 13:16:54.862322 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7" event={"ID":"b88b2649-f69f-4572-9db2-a66e4ac5ec2d","Type":"ContainerStarted","Data":"4f53d904945146d418c2432fdc0c760312d609602a225e933d2ba770c4f3b078"}
Feb 16 13:16:54 crc kubenswrapper[4816]: I0216 13:16:54.862671 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7"
Feb 16 13:16:54 crc kubenswrapper[4816]: I0216 13:16:54.888173 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7" podStartSLOduration=1.421288004 podStartE2EDuration="4.888152904s" podCreationTimestamp="2026-02-16 13:16:50 +0000 UTC" firstStartedPulling="2026-02-16 13:16:51.158278295 +0000 UTC m=+810.484992023" lastFinishedPulling="2026-02-16 13:16:54.625143195 +0000 UTC m=+813.951856923" observedRunningTime="2026-02-16 13:16:54.883077396 +0000 UTC m=+814.209791124" watchObservedRunningTime="2026-02-16 13:16:54.888152904 +0000 UTC m=+814.214866632"
Feb 16 13:16:57 crc
kubenswrapper[4816]: I0216 13:16:57.885025 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd" event={"ID":"1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1","Type":"ContainerStarted","Data":"69f23f0560f3e1b8602627beedf9e72b40d1d680c21db738083dfa472b307622"} Feb 16 13:16:57 crc kubenswrapper[4816]: I0216 13:16:57.885432 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd" Feb 16 13:16:57 crc kubenswrapper[4816]: I0216 13:16:57.903777 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd" podStartSLOduration=1.649164869 podStartE2EDuration="7.903754693s" podCreationTimestamp="2026-02-16 13:16:50 +0000 UTC" firstStartedPulling="2026-02-16 13:16:51.43076571 +0000 UTC m=+810.757479438" lastFinishedPulling="2026-02-16 13:16:57.685355534 +0000 UTC m=+817.012069262" observedRunningTime="2026-02-16 13:16:57.901938874 +0000 UTC m=+817.228652612" watchObservedRunningTime="2026-02-16 13:16:57.903754693 +0000 UTC m=+817.230468421" Feb 16 13:16:58 crc kubenswrapper[4816]: I0216 13:16:58.853061 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8b9pn" Feb 16 13:16:58 crc kubenswrapper[4816]: I0216 13:16:58.900392 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8b9pn" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.064501 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8b9pn"] Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.064775 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8b9pn" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="registry-server" containerID="cri-o://612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9" gracePeriod=2 Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.410022 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8b9pn" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.520166 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-utilities\") pod \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.520287 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gh6r\" (UniqueName: \"kubernetes.io/projected/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-kube-api-access-9gh6r\") pod \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.520348 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-catalog-content\") pod \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\" (UID: \"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9\") " Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.520956 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-utilities" (OuterVolumeSpecName: "utilities") pod "9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" (UID: "9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.527296 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-kube-api-access-9gh6r" (OuterVolumeSpecName: "kube-api-access-9gh6r") pod "9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" (UID: "9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9"). InnerVolumeSpecName "kube-api-access-9gh6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.621520 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.621558 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gh6r\" (UniqueName: \"kubernetes.io/projected/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-kube-api-access-9gh6r\") on node \"crc\" DevicePath \"\"" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.644357 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" (UID: "9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.723298 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.902670 4816 generic.go:334] "Generic (PLEG): container finished" podID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerID="612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9" exitCode=0 Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.902717 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8b9pn" event={"ID":"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9","Type":"ContainerDied","Data":"612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9"} Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.902762 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8b9pn" event={"ID":"9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9","Type":"ContainerDied","Data":"a4c47abe6161decd284754a68bb35df8119edf678fbf134bb5df34c66fd285a8"} Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.902778 4816 scope.go:117] "RemoveContainer" containerID="612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.902803 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8b9pn" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.917641 4816 scope.go:117] "RemoveContainer" containerID="111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.934595 4816 scope.go:117] "RemoveContainer" containerID="193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.936410 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8b9pn"] Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.942208 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8b9pn"] Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.971236 4816 scope.go:117] "RemoveContainer" containerID="612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9" Feb 16 13:17:00 crc kubenswrapper[4816]: E0216 13:17:00.972000 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9\": container with ID starting with 612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9 not found: ID does not exist" containerID="612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.972034 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9"} err="failed to get container status \"612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9\": rpc error: code = NotFound desc = could not find container \"612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9\": container with ID starting with 612d1673fe7a8403ecfc095533fdcf5e2adacdca8bcde34e0cb34e7f9b3df2e9 not found: ID does not exist" Feb 16 13:17:00 crc 
kubenswrapper[4816]: I0216 13:17:00.972054 4816 scope.go:117] "RemoveContainer" containerID="111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a" Feb 16 13:17:00 crc kubenswrapper[4816]: E0216 13:17:00.972810 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a\": container with ID starting with 111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a not found: ID does not exist" containerID="111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.972838 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a"} err="failed to get container status \"111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a\": rpc error: code = NotFound desc = could not find container \"111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a\": container with ID starting with 111452f8eca7565d8a82cae8c68c6c0f87cf4014933b8263bb41a85adec49f9a not found: ID does not exist" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.972853 4816 scope.go:117] "RemoveContainer" containerID="193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13" Feb 16 13:17:00 crc kubenswrapper[4816]: E0216 13:17:00.973283 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13\": container with ID starting with 193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13 not found: ID does not exist" containerID="193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13" Feb 16 13:17:00 crc kubenswrapper[4816]: I0216 13:17:00.973315 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13"} err="failed to get container status \"193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13\": rpc error: code = NotFound desc = could not find container \"193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13\": container with ID starting with 193a9954e767c62331307596194d33308ba98d2b119f134cf01ad7fa9d774c13 not found: ID does not exist" Feb 16 13:17:01 crc kubenswrapper[4816]: I0216 13:17:01.405640 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" path="/var/lib/kubelet/pods/9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9/volumes" Feb 16 13:17:06 crc kubenswrapper[4816]: I0216 13:17:06.940999 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:17:06 crc kubenswrapper[4816]: I0216 13:17:06.941511 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:17:11 crc kubenswrapper[4816]: I0216 13:17:11.123566 4816 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-54976bdc59-m2djd" Feb 16 13:17:30 crc kubenswrapper[4816]: I0216 13:17:30.841695 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5c799b7d57-xrnz7" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.530593 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb"] Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.531439 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="extract-utilities" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.531457 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="extract-utilities" Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.531470 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="registry-server" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.531476 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="registry-server" Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.531493 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="extract-content" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.531500 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="extract-content" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.531616 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d1ca3f5-f93c-4f2a-ae3c-e02274ee24b9" containerName="registry-server" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.532128 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.534386 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.535026 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-b2fgw" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.535489 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0fa65809-3f66-4677-bec4-82775c83a07f-cert\") pod \"frr-k8s-webhook-server-78b44bf5bb-ctqqb\" (UID: \"0fa65809-3f66-4677-bec4-82775c83a07f\") " pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.535550 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xs4s\" (UniqueName: \"kubernetes.io/projected/0fa65809-3f66-4677-bec4-82775c83a07f-kube-api-access-7xs4s\") pod \"frr-k8s-webhook-server-78b44bf5bb-ctqqb\" (UID: \"0fa65809-3f66-4677-bec4-82775c83a07f\") " pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.536103 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-sl9h9"] Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.539027 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.543503 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb"] Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.545284 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.545451 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.608143 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-46t2n"] Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.608967 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.610927 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.611159 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.612740 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.612762 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-fllz4" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.622686 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-69bbfbf88f-srbbd"] Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.623835 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.625508 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.636515 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0fa65809-3f66-4677-bec4-82775c83a07f-cert\") pod \"frr-k8s-webhook-server-78b44bf5bb-ctqqb\" (UID: \"0fa65809-3f66-4677-bec4-82775c83a07f\") " pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.636567 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xs4s\" (UniqueName: \"kubernetes.io/projected/0fa65809-3f66-4677-bec4-82775c83a07f-kube-api-access-7xs4s\") pod \"frr-k8s-webhook-server-78b44bf5bb-ctqqb\" (UID: \"0fa65809-3f66-4677-bec4-82775c83a07f\") " pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.636970 4816 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.637018 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0fa65809-3f66-4677-bec4-82775c83a07f-cert podName:0fa65809-3f66-4677-bec4-82775c83a07f nodeName:}" failed. No retries permitted until 2026-02-16 13:17:32.137001924 +0000 UTC m=+851.463715652 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0fa65809-3f66-4677-bec4-82775c83a07f-cert") pod "frr-k8s-webhook-server-78b44bf5bb-ctqqb" (UID: "0fa65809-3f66-4677-bec4-82775c83a07f") : secret "frr-k8s-webhook-server-cert" not found Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.643604 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-69bbfbf88f-srbbd"] Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.666386 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xs4s\" (UniqueName: \"kubernetes.io/projected/0fa65809-3f66-4677-bec4-82775c83a07f-kube-api-access-7xs4s\") pod \"frr-k8s-webhook-server-78b44bf5bb-ctqqb\" (UID: \"0fa65809-3f66-4677-bec4-82775c83a07f\") " pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738059 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-conf\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738296 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-cert\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738345 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qffnr\" (UniqueName: \"kubernetes.io/projected/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-kube-api-access-qffnr\") pod 
\"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738367 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-metrics-certs\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738389 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbxnk\" (UniqueName: \"kubernetes.io/projected/9b2ea8e1-2288-4080-a20e-637ea18dc35c-kube-api-access-kbxnk\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738458 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-sockets\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738482 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738499 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9b2ea8e1-2288-4080-a20e-637ea18dc35c-metrics-certs\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738525 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-reloader\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738545 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-startup\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738590 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2thf\" (UniqueName: \"kubernetes.io/projected/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-kube-api-access-b2thf\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738603 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metrics-certs\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " 
pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738637 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metallb-excludel2\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.738677 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-metrics\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-conf\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840102 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-cert\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840120 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qffnr\" (UniqueName: \"kubernetes.io/projected/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-kube-api-access-qffnr\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840135 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-metrics-certs\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840152 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbxnk\" (UniqueName: \"kubernetes.io/projected/9b2ea8e1-2288-4080-a20e-637ea18dc35c-kube-api-access-kbxnk\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840184 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-sockets\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840219 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840235 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9b2ea8e1-2288-4080-a20e-637ea18dc35c-metrics-certs\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840253 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-reloader\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840268 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-startup\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840291 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2thf\" (UniqueName: \"kubernetes.io/projected/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-kube-api-access-b2thf\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840304 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metrics-certs\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840324 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metallb-excludel2\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840341 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-metrics\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.840405 4816 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.840486 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist podName:a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4 nodeName:}" failed. No retries permitted until 2026-02-16 13:17:32.340460569 +0000 UTC m=+851.667174297 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist") pod "speaker-46t2n" (UID: "a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4") : secret "metallb-memberlist" not found Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.840739 4816 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840757 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-metrics\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: E0216 13:17:31.840804 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metrics-certs podName:a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4 nodeName:}" failed. No retries permitted until 2026-02-16 13:17:32.340786468 +0000 UTC m=+851.667500216 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metrics-certs") pod "speaker-46t2n" (UID: "a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4") : secret "speaker-certs-secret" not found Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.840740 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-sockets\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.841043 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-reloader\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.841333 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-conf\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.841423 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metallb-excludel2\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.841747 4816 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.846541 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9b2ea8e1-2288-4080-a20e-637ea18dc35c-frr-startup\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.849776 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-metrics-certs\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.853288 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-cert\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.853314 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9b2ea8e1-2288-4080-a20e-637ea18dc35c-metrics-certs\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.866310 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2thf\" (UniqueName: \"kubernetes.io/projected/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-kube-api-access-b2thf\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.867061 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qffnr\" (UniqueName: \"kubernetes.io/projected/8f542a71-53fe-4588-85ed-1c8bffb0b2c2-kube-api-access-qffnr\") pod \"controller-69bbfbf88f-srbbd\" (UID: \"8f542a71-53fe-4588-85ed-1c8bffb0b2c2\") " pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.869155 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbxnk\" (UniqueName: \"kubernetes.io/projected/9b2ea8e1-2288-4080-a20e-637ea18dc35c-kube-api-access-kbxnk\") pod \"frr-k8s-sl9h9\" (UID: \"9b2ea8e1-2288-4080-a20e-637ea18dc35c\") " pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:31 crc kubenswrapper[4816]: I0216 13:17:31.938323 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.144561 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0fa65809-3f66-4677-bec4-82775c83a07f-cert\") pod \"frr-k8s-webhook-server-78b44bf5bb-ctqqb\" (UID: \"0fa65809-3f66-4677-bec4-82775c83a07f\") " pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.150057 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0fa65809-3f66-4677-bec4-82775c83a07f-cert\") pod \"frr-k8s-webhook-server-78b44bf5bb-ctqqb\" (UID: \"0fa65809-3f66-4677-bec4-82775c83a07f\") " pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.150964 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-69bbfbf88f-srbbd"] Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.153057 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.163805 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.347396 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.347730 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metrics-certs\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:32 crc kubenswrapper[4816]: E0216 13:17:32.347851 4816 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 16 13:17:32 crc kubenswrapper[4816]: E0216 13:17:32.347914 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist podName:a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4 nodeName:}" failed. No retries permitted until 2026-02-16 13:17:33.347895943 +0000 UTC m=+852.674609671 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist") pod "speaker-46t2n" (UID: "a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4") : secret "metallb-memberlist" not found Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.354333 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-metrics-certs\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:32 crc kubenswrapper[4816]: I0216 13:17:32.378375 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb"] Feb 16 13:17:32 crc kubenswrapper[4816]: W0216 13:17:32.385565 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa65809_3f66_4677_bec4_82775c83a07f.slice/crio-3cc5ad8477d6d4d8d5327aed26bd3d6eb405e582c30ba92f93bcd04423a2cd0c WatchSource:0}: Error finding container 3cc5ad8477d6d4d8d5327aed26bd3d6eb405e582c30ba92f93bcd04423a2cd0c: Status 404 returned error can't find the container with id 3cc5ad8477d6d4d8d5327aed26bd3d6eb405e582c30ba92f93bcd04423a2cd0c Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.076396 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerStarted","Data":"5fb6da99e6354bdcc49897c9df7b5daf8c9ce0e1982a76667c468607a47bf554"} Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.077635 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" event={"ID":"0fa65809-3f66-4677-bec4-82775c83a07f","Type":"ContainerStarted","Data":"3cc5ad8477d6d4d8d5327aed26bd3d6eb405e582c30ba92f93bcd04423a2cd0c"} Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.079506 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-69bbfbf88f-srbbd" 
event={"ID":"8f542a71-53fe-4588-85ed-1c8bffb0b2c2","Type":"ContainerStarted","Data":"77823f1287f3f94f6d7240d3929a2411d0ed13093a320fb4c81d88e3cc4bf29f"} Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.079538 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-69bbfbf88f-srbbd" event={"ID":"8f542a71-53fe-4588-85ed-1c8bffb0b2c2","Type":"ContainerStarted","Data":"5c3a2bc14f0b6b2dc231c2112afeb13deca4b0c0352e6b3340275b5992474644"} Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.079551 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-69bbfbf88f-srbbd" event={"ID":"8f542a71-53fe-4588-85ed-1c8bffb0b2c2","Type":"ContainerStarted","Data":"d6b83bf332887e59822b7d181b38f952f6c4271d3976b65322da50fbbaa4beb8"} Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.079692 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.104459 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-69bbfbf88f-srbbd" podStartSLOduration=2.104436559 podStartE2EDuration="2.104436559s" podCreationTimestamp="2026-02-16 13:17:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:17:33.099795684 +0000 UTC m=+852.426509412" watchObservedRunningTime="2026-02-16 13:17:33.104436559 +0000 UTC m=+852.431150297" Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.361526 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.371306 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4-memberlist\") pod \"speaker-46t2n\" (UID: \"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4\") " pod="metallb-system/speaker-46t2n" Feb 16 13:17:33 crc kubenswrapper[4816]: I0216 13:17:33.423603 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-46t2n" Feb 16 13:17:33 crc kubenswrapper[4816]: W0216 13:17:33.450999 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3d90fa9_733d_4cd0_b4c3_5c1c3c7308a4.slice/crio-e1c813238d939b5973137691d0a3266f639f0731992ff056b05557bc8de10c87 WatchSource:0}: Error finding container e1c813238d939b5973137691d0a3266f639f0731992ff056b05557bc8de10c87: Status 404 returned error can't find the container with id e1c813238d939b5973137691d0a3266f639f0731992ff056b05557bc8de10c87 Feb 16 13:17:34 crc kubenswrapper[4816]: I0216 13:17:34.086828 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-46t2n" event={"ID":"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4","Type":"ContainerStarted","Data":"2a88bab98884eaec0c6099dbf57c24739f803ab636a6f8318179e11d01101b44"} Feb 16 13:17:34 crc kubenswrapper[4816]: I0216 13:17:34.087049 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-46t2n" event={"ID":"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4","Type":"ContainerStarted","Data":"e1d6b73ffbe522255ae99741250bca265781cbb0ddb8be82f35323c011c08f13"} Feb 16 13:17:34 crc kubenswrapper[4816]: I0216 13:17:34.087059 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-46t2n" event={"ID":"a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4","Type":"ContainerStarted","Data":"e1c813238d939b5973137691d0a3266f639f0731992ff056b05557bc8de10c87"} Feb 16 13:17:34 crc kubenswrapper[4816]: I0216 13:17:34.087536 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-46t2n" Feb 16 13:17:34 crc kubenswrapper[4816]: I0216 13:17:34.111755 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-46t2n" podStartSLOduration=3.111736733 podStartE2EDuration="3.111736733s" podCreationTimestamp="2026-02-16 13:17:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:17:34.111015373 +0000 UTC m=+853.437729101" watchObservedRunningTime="2026-02-16 13:17:34.111736733 +0000 UTC m=+853.438450461" Feb 16 13:17:36 crc kubenswrapper[4816]: I0216 13:17:36.940533 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:17:36 crc kubenswrapper[4816]: I0216 13:17:36.940850 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:17:36 crc kubenswrapper[4816]: I0216 13:17:36.940892 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:17:36 crc kubenswrapper[4816]: I0216 13:17:36.941431 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7e0981cd562f683639a286c8a9849e9acb7985787e5b7fb344492cda47873ec7"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:17:36 crc kubenswrapper[4816]: I0216 13:17:36.941482 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://7e0981cd562f683639a286c8a9849e9acb7985787e5b7fb344492cda47873ec7" gracePeriod=600 Feb 16 13:17:37 crc kubenswrapper[4816]: I0216 13:17:37.138935 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="7e0981cd562f683639a286c8a9849e9acb7985787e5b7fb344492cda47873ec7" exitCode=0 Feb 16 13:17:37 crc kubenswrapper[4816]: I0216 13:17:37.138963 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"7e0981cd562f683639a286c8a9849e9acb7985787e5b7fb344492cda47873ec7"} Feb 16 13:17:37 crc kubenswrapper[4816]: I0216 13:17:37.139021 4816 scope.go:117] "RemoveContainer" containerID="468b1f3fff8cff5cea98c4eb833e160c55f4f1d43b584a9679974b9249403d15" Feb 16 13:17:40 crc kubenswrapper[4816]: I0216 13:17:40.162203 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"64ab67741e223081f84c6d63a99c0d895038e507375b2c1f1a0cf120b6972be0"} Feb 16 13:17:40 crc kubenswrapper[4816]: I0216 13:17:40.164668 4816 generic.go:334] "Generic (PLEG): container finished" podID="9b2ea8e1-2288-4080-a20e-637ea18dc35c" containerID="1cc7b7a8bae4366d62df57d73a3e3ce947dcccab737d5929518a7f6b635254ac" exitCode=0 Feb 16 13:17:40 crc kubenswrapper[4816]: I0216 13:17:40.164824 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerDied","Data":"1cc7b7a8bae4366d62df57d73a3e3ce947dcccab737d5929518a7f6b635254ac"} Feb 16 13:17:40 crc kubenswrapper[4816]: I0216 13:17:40.166120 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" event={"ID":"0fa65809-3f66-4677-bec4-82775c83a07f","Type":"ContainerStarted","Data":"e6585cb72a675838213f03ad1eb5de1e92dcdaf28a4a9d4564289da0c2dc7d9a"} Feb 16 13:17:40 crc kubenswrapper[4816]: I0216 13:17:40.166325 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:40 crc kubenswrapper[4816]: I0216 13:17:40.215246 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" podStartSLOduration=2.130490336 podStartE2EDuration="9.21522842s" podCreationTimestamp="2026-02-16 13:17:31 +0000 UTC" firstStartedPulling="2026-02-16 13:17:32.389030958 +0000 UTC m=+851.715744686" lastFinishedPulling="2026-02-16 13:17:39.473769042 +0000 UTC m=+858.800482770" observedRunningTime="2026-02-16 13:17:40.208742654 +0000 UTC m=+859.535456402" watchObservedRunningTime="2026-02-16 13:17:40.21522842 +0000 UTC m=+859.541942148" Feb 16 13:17:41 crc kubenswrapper[4816]: I0216 13:17:41.173669 4816 generic.go:334] "Generic (PLEG): container finished" podID="9b2ea8e1-2288-4080-a20e-637ea18dc35c" 
containerID="8bd5a6b1e40a45af903687fdfdccbb1711395b366d4cffeb02c43c042504aac8" exitCode=0 Feb 16 13:17:41 crc kubenswrapper[4816]: I0216 13:17:41.173725 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerDied","Data":"8bd5a6b1e40a45af903687fdfdccbb1711395b366d4cffeb02c43c042504aac8"} Feb 16 13:17:42 crc kubenswrapper[4816]: I0216 13:17:42.179354 4816 generic.go:334] "Generic (PLEG): container finished" podID="9b2ea8e1-2288-4080-a20e-637ea18dc35c" containerID="d4819700edf1e0477c27ece6fc4adc6415d61519085362fed0fe2716b550ab99" exitCode=0 Feb 16 13:17:42 crc kubenswrapper[4816]: I0216 13:17:42.179416 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerDied","Data":"d4819700edf1e0477c27ece6fc4adc6415d61519085362fed0fe2716b550ab99"} Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.195180 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerStarted","Data":"bc5405ab16933f4a614716f78a7206ad74d9534850f4dbe2db73d68b7c8b73e7"} Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.195475 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerStarted","Data":"1c5ef42d4c7423c9eee1ca4fa469a3cb07d25d30627395f991de058c03e4f5e2"} Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.195485 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerStarted","Data":"a6c38b171763e7feb376aa116bf6de7845fdffb4958a2608bd1a410f7610a0bf"} Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.195493 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerStarted","Data":"5551b26a428e2194cd90dbb8d67ba7e6503062b8ee6a7e3dc3bc51b492ad45bc"} Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.195501 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerStarted","Data":"c9a01e4c99c0cd2213813667e104bc4a400d3fc578c62830bd19b7d1166e8d06"} Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.195509 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sl9h9" event={"ID":"9b2ea8e1-2288-4080-a20e-637ea18dc35c","Type":"ContainerStarted","Data":"3b5c1d618ba4259581e0f88618d9c0e7f8c9f1957255620a059b3f8aaf9b1237"} Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.195754 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.234966 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-sl9h9" podStartSLOduration=5.116095991 podStartE2EDuration="12.23494806s" podCreationTimestamp="2026-02-16 13:17:31 +0000 UTC" firstStartedPulling="2026-02-16 13:17:32.369211891 +0000 UTC m=+851.695925619" lastFinishedPulling="2026-02-16 13:17:39.48806397 +0000 UTC m=+858.814777688" observedRunningTime="2026-02-16 13:17:43.23199917 +0000 UTC m=+862.558712918" watchObservedRunningTime="2026-02-16 13:17:43.23494806 +0000 UTC 
m=+862.561661798" Feb 16 13:17:43 crc kubenswrapper[4816]: I0216 13:17:43.427355 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-46t2n" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.800399 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv"] Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.802060 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.804026 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.811471 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.811539 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vnqb\" (UniqueName: \"kubernetes.io/projected/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-kube-api-access-8vnqb\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.811623 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.818421 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv"] Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.912567 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.912879 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vnqb\" (UniqueName: \"kubernetes.io/projected/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-kube-api-access-8vnqb\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.912942 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"util\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.913042 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.913372 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:44 crc kubenswrapper[4816]: I0216 13:17:44.930453 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vnqb\" (UniqueName: \"kubernetes.io/projected/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-kube-api-access-8vnqb\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:45 crc kubenswrapper[4816]: I0216 13:17:45.119589 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:45 crc kubenswrapper[4816]: I0216 13:17:45.523499 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv"] Feb 16 13:17:45 crc kubenswrapper[4816]: W0216 13:17:45.533179 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a1c7127_fb16_4db0_ba07_5a9589c5ace2.slice/crio-05111be610cc8333d61e374375724560ad8923ee5f6a4612631eb08c92002759 WatchSource:0}: Error finding container 05111be610cc8333d61e374375724560ad8923ee5f6a4612631eb08c92002759: Status 404 returned error can't find the container with id 05111be610cc8333d61e374375724560ad8923ee5f6a4612631eb08c92002759 Feb 16 13:17:46 crc kubenswrapper[4816]: I0216 13:17:46.212290 4816 generic.go:334] "Generic (PLEG): container finished" podID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerID="4317e5da84c713c8c3b2ddedfe512c05f8c4d33ba1bd911acdeb12e5fdc2774d" exitCode=0 Feb 16 13:17:46 crc kubenswrapper[4816]: I0216 13:17:46.212328 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" event={"ID":"1a1c7127-fb16-4db0-ba07-5a9589c5ace2","Type":"ContainerDied","Data":"4317e5da84c713c8c3b2ddedfe512c05f8c4d33ba1bd911acdeb12e5fdc2774d"} Feb 16 13:17:46 crc kubenswrapper[4816]: I0216 13:17:46.212358 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" event={"ID":"1a1c7127-fb16-4db0-ba07-5a9589c5ace2","Type":"ContainerStarted","Data":"05111be610cc8333d61e374375724560ad8923ee5f6a4612631eb08c92002759"} Feb 16 13:17:47 crc kubenswrapper[4816]: I0216 13:17:47.164617 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:47 crc kubenswrapper[4816]: I0216 13:17:47.206798 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:51 crc kubenswrapper[4816]: I0216 13:17:51.252166 4816 generic.go:334] "Generic (PLEG): container finished" podID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerID="922f450745476ef2828be3db22c0d6382c40e77f307dad7a37d5f4bfcf5452dd" exitCode=0 Feb 16 13:17:51 crc kubenswrapper[4816]: I0216 13:17:51.252205 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" event={"ID":"1a1c7127-fb16-4db0-ba07-5a9589c5ace2","Type":"ContainerDied","Data":"922f450745476ef2828be3db22c0d6382c40e77f307dad7a37d5f4bfcf5452dd"} Feb 16 13:17:51 crc kubenswrapper[4816]: I0216 13:17:51.947561 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-69bbfbf88f-srbbd" Feb 16 13:17:52 crc kubenswrapper[4816]: I0216 13:17:52.158278 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-78b44bf5bb-ctqqb" Feb 16 13:17:52 crc kubenswrapper[4816]: I0216 13:17:52.167566 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-sl9h9" Feb 16 13:17:52 crc kubenswrapper[4816]: I0216 13:17:52.258922 4816 generic.go:334] "Generic (PLEG): container finished" podID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" 
containerID="88dbcb1ed7f6ea8a6c99a83afa4ec4323ebfda82f52853c79ee1322fa217d0c2" exitCode=0 Feb 16 13:17:52 crc kubenswrapper[4816]: I0216 13:17:52.258962 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" event={"ID":"1a1c7127-fb16-4db0-ba07-5a9589c5ace2","Type":"ContainerDied","Data":"88dbcb1ed7f6ea8a6c99a83afa4ec4323ebfda82f52853c79ee1322fa217d0c2"} Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.565850 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.744503 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vnqb\" (UniqueName: \"kubernetes.io/projected/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-kube-api-access-8vnqb\") pod \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.744771 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-util\") pod \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.744886 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-bundle\") pod \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\" (UID: \"1a1c7127-fb16-4db0-ba07-5a9589c5ace2\") " Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.745875 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-bundle" (OuterVolumeSpecName: "bundle") pod "1a1c7127-fb16-4db0-ba07-5a9589c5ace2" (UID: "1a1c7127-fb16-4db0-ba07-5a9589c5ace2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.749642 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-kube-api-access-8vnqb" (OuterVolumeSpecName: "kube-api-access-8vnqb") pod "1a1c7127-fb16-4db0-ba07-5a9589c5ace2" (UID: "1a1c7127-fb16-4db0-ba07-5a9589c5ace2"). InnerVolumeSpecName "kube-api-access-8vnqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.755522 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-util" (OuterVolumeSpecName: "util") pod "1a1c7127-fb16-4db0-ba07-5a9589c5ace2" (UID: "1a1c7127-fb16-4db0-ba07-5a9589c5ace2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.846529 4816 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.846574 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vnqb\" (UniqueName: \"kubernetes.io/projected/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-kube-api-access-8vnqb\") on node \"crc\" DevicePath \"\"" Feb 16 13:17:53 crc kubenswrapper[4816]: I0216 13:17:53.846584 4816 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a1c7127-fb16-4db0-ba07-5a9589c5ace2-util\") on node \"crc\" DevicePath \"\"" Feb 16 13:17:54 crc kubenswrapper[4816]: I0216 13:17:54.275920 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" event={"ID":"1a1c7127-fb16-4db0-ba07-5a9589c5ace2","Type":"ContainerDied","Data":"05111be610cc8333d61e374375724560ad8923ee5f6a4612631eb08c92002759"} Feb 16 13:17:54 crc kubenswrapper[4816]: I0216 13:17:54.275967 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05111be610cc8333d61e374375724560ad8923ee5f6a4612631eb08c92002759" Feb 16 13:17:54 crc kubenswrapper[4816]: I0216 13:17:54.275973 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.279772 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7"] Feb 16 13:17:58 crc kubenswrapper[4816]: E0216 13:17:58.280569 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerName="extract" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.280583 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerName="extract" Feb 16 13:17:58 crc kubenswrapper[4816]: E0216 13:17:58.280597 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerName="pull" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.280604 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerName="pull" Feb 16 13:17:58 crc kubenswrapper[4816]: E0216 13:17:58.280621 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerName="util" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.280630 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerName="util" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.280773 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a1c7127-fb16-4db0-ba07-5a9589c5ace2" containerName="extract" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.281252 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.284847 4816 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-ghhsb" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.284929 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.284987 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.297065 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7"] Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.452423 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/24e777d7-f017-4bc2-b9a4-65c5f8da823b-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-lsnz7\" (UID: \"24e777d7-f017-4bc2-b9a4-65c5f8da823b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.452518 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9blh\" (UniqueName: \"kubernetes.io/projected/24e777d7-f017-4bc2-b9a4-65c5f8da823b-kube-api-access-j9blh\") pod \"cert-manager-operator-controller-manager-66c8bdd694-lsnz7\" (UID: \"24e777d7-f017-4bc2-b9a4-65c5f8da823b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.553407 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/24e777d7-f017-4bc2-b9a4-65c5f8da823b-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-lsnz7\" (UID: \"24e777d7-f017-4bc2-b9a4-65c5f8da823b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.553497 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9blh\" (UniqueName: \"kubernetes.io/projected/24e777d7-f017-4bc2-b9a4-65c5f8da823b-kube-api-access-j9blh\") pod \"cert-manager-operator-controller-manager-66c8bdd694-lsnz7\" (UID: \"24e777d7-f017-4bc2-b9a4-65c5f8da823b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.554142 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/24e777d7-f017-4bc2-b9a4-65c5f8da823b-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-lsnz7\" (UID: \"24e777d7-f017-4bc2-b9a4-65c5f8da823b\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.574619 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9blh\" (UniqueName: \"kubernetes.io/projected/24e777d7-f017-4bc2-b9a4-65c5f8da823b-kube-api-access-j9blh\") pod \"cert-manager-operator-controller-manager-66c8bdd694-lsnz7\" (UID: \"24e777d7-f017-4bc2-b9a4-65c5f8da823b\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.604999 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" Feb 16 13:17:58 crc kubenswrapper[4816]: I0216 13:17:58.952608 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7"] Feb 16 13:17:58 crc kubenswrapper[4816]: W0216 13:17:58.961379 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24e777d7_f017_4bc2_b9a4_65c5f8da823b.slice/crio-dec0dd1bed81b2ff23770fea18f56b9c82ef00d0f53e4647bd6022a36ce0da27 WatchSource:0}: Error finding container dec0dd1bed81b2ff23770fea18f56b9c82ef00d0f53e4647bd6022a36ce0da27: Status 404 returned error can't find the container with id dec0dd1bed81b2ff23770fea18f56b9c82ef00d0f53e4647bd6022a36ce0da27 Feb 16 13:17:59 crc kubenswrapper[4816]: I0216 13:17:59.314960 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" event={"ID":"24e777d7-f017-4bc2-b9a4-65c5f8da823b","Type":"ContainerStarted","Data":"dec0dd1bed81b2ff23770fea18f56b9c82ef00d0f53e4647bd6022a36ce0da27"} Feb 16 13:18:02 crc kubenswrapper[4816]: I0216 13:18:02.338072 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" event={"ID":"24e777d7-f017-4bc2-b9a4-65c5f8da823b","Type":"ContainerStarted","Data":"a0d981ac4111a19076c41fd74e271f2ef157ca41319a0234c8ef6b59ef4aba51"} Feb 16 13:18:02 crc kubenswrapper[4816]: I0216 13:18:02.360249 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-lsnz7" podStartSLOduration=1.459798879 podStartE2EDuration="4.360229457s" podCreationTimestamp="2026-02-16 13:17:58 +0000 UTC" firstStartedPulling="2026-02-16 13:17:58.964109413 +0000 UTC m=+878.290823141" lastFinishedPulling="2026-02-16 13:18:01.864539991 +0000 UTC m=+881.191253719" observedRunningTime="2026-02-16 13:18:02.355038216 +0000 UTC m=+881.681751974" watchObservedRunningTime="2026-02-16 13:18:02.360229457 +0000 UTC m=+881.686943185" Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.832530 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-p4ghc"] Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.833829 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.836727 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.836762 4816 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-snv6j" Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.836908 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.845354 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-p4ghc"] Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.955126 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f05f1fe5-1b23-4adb-a77f-b2f665050c31-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-p4ghc\" (UID: \"f05f1fe5-1b23-4adb-a77f-b2f665050c31\") " pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:06 crc kubenswrapper[4816]: I0216 13:18:06.955245 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgrbs\" (UniqueName: \"kubernetes.io/projected/f05f1fe5-1b23-4adb-a77f-b2f665050c31-kube-api-access-qgrbs\") pod \"cert-manager-webhook-6888856db4-p4ghc\" (UID: \"f05f1fe5-1b23-4adb-a77f-b2f665050c31\") " pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:07 crc kubenswrapper[4816]: I0216 13:18:07.056996 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f05f1fe5-1b23-4adb-a77f-b2f665050c31-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-p4ghc\" (UID: \"f05f1fe5-1b23-4adb-a77f-b2f665050c31\") " pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:07 crc kubenswrapper[4816]: I0216 13:18:07.057062 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgrbs\" (UniqueName: \"kubernetes.io/projected/f05f1fe5-1b23-4adb-a77f-b2f665050c31-kube-api-access-qgrbs\") pod \"cert-manager-webhook-6888856db4-p4ghc\" (UID: \"f05f1fe5-1b23-4adb-a77f-b2f665050c31\") " pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:07 crc kubenswrapper[4816]: I0216 13:18:07.086589 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f05f1fe5-1b23-4adb-a77f-b2f665050c31-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-p4ghc\" (UID: \"f05f1fe5-1b23-4adb-a77f-b2f665050c31\") " pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:07 crc kubenswrapper[4816]: I0216 13:18:07.097402 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgrbs\" (UniqueName: \"kubernetes.io/projected/f05f1fe5-1b23-4adb-a77f-b2f665050c31-kube-api-access-qgrbs\") pod \"cert-manager-webhook-6888856db4-p4ghc\" (UID: \"f05f1fe5-1b23-4adb-a77f-b2f665050c31\") " pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:07 crc kubenswrapper[4816]: I0216 13:18:07.148299 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:07 crc kubenswrapper[4816]: I0216 13:18:07.619846 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-p4ghc"] Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.056702 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-mxfr4"] Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.057859 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.060794 4816 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-r22sj" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.068124 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k2x7\" (UniqueName: \"kubernetes.io/projected/ef08db43-1ed7-4676-9c65-a7de37ad26de-kube-api-access-2k2x7\") pod \"cert-manager-cainjector-5545bd876-mxfr4\" (UID: \"ef08db43-1ed7-4676-9c65-a7de37ad26de\") " pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.068205 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef08db43-1ed7-4676-9c65-a7de37ad26de-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-mxfr4\" (UID: \"ef08db43-1ed7-4676-9c65-a7de37ad26de\") " pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.073043 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-mxfr4"] Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.169446 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k2x7\" (UniqueName: \"kubernetes.io/projected/ef08db43-1ed7-4676-9c65-a7de37ad26de-kube-api-access-2k2x7\") pod \"cert-manager-cainjector-5545bd876-mxfr4\" (UID: \"ef08db43-1ed7-4676-9c65-a7de37ad26de\") " pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.169518 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef08db43-1ed7-4676-9c65-a7de37ad26de-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-mxfr4\" (UID: \"ef08db43-1ed7-4676-9c65-a7de37ad26de\") " pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.188403 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k2x7\" (UniqueName: \"kubernetes.io/projected/ef08db43-1ed7-4676-9c65-a7de37ad26de-kube-api-access-2k2x7\") pod \"cert-manager-cainjector-5545bd876-mxfr4\" (UID: \"ef08db43-1ed7-4676-9c65-a7de37ad26de\") " pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.192467 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef08db43-1ed7-4676-9c65-a7de37ad26de-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-mxfr4\" (UID: \"ef08db43-1ed7-4676-9c65-a7de37ad26de\") " pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 
13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.376437 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.378024 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" event={"ID":"f05f1fe5-1b23-4adb-a77f-b2f665050c31","Type":"ContainerStarted","Data":"1c92081c8926cc35b98bc122084b302b0a4476343b267d6e769db0d6e2d33cde"} Feb 16 13:18:08 crc kubenswrapper[4816]: I0216 13:18:08.850597 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-mxfr4"] Feb 16 13:18:08 crc kubenswrapper[4816]: W0216 13:18:08.858792 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef08db43_1ed7_4676_9c65_a7de37ad26de.slice/crio-21afae6ef01fc6eee9930ce4113cd18cdafd0dc24e0a674adf944517b65b2e5c WatchSource:0}: Error finding container 21afae6ef01fc6eee9930ce4113cd18cdafd0dc24e0a674adf944517b65b2e5c: Status 404 returned error can't find the container with id 21afae6ef01fc6eee9930ce4113cd18cdafd0dc24e0a674adf944517b65b2e5c Feb 16 13:18:09 crc kubenswrapper[4816]: I0216 13:18:09.385370 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" event={"ID":"ef08db43-1ed7-4676-9c65-a7de37ad26de","Type":"ContainerStarted","Data":"21afae6ef01fc6eee9930ce4113cd18cdafd0dc24e0a674adf944517b65b2e5c"} Feb 16 13:18:13 crc kubenswrapper[4816]: I0216 13:18:13.428415 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" event={"ID":"ef08db43-1ed7-4676-9c65-a7de37ad26de","Type":"ContainerStarted","Data":"238b6dd54a4e8594305fb007803ad01280bde0377ce0b142606ed9571d9d26ef"} Feb 16 13:18:13 crc kubenswrapper[4816]: I0216 13:18:13.430611 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" event={"ID":"f05f1fe5-1b23-4adb-a77f-b2f665050c31","Type":"ContainerStarted","Data":"3a2caef2f273339da08648cc3f9bdf04d7249bb68861c69740f49d6c89987a38"} Feb 16 13:18:13 crc kubenswrapper[4816]: I0216 13:18:13.435463 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:13 crc kubenswrapper[4816]: I0216 13:18:13.451675 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-5545bd876-mxfr4" podStartSLOduration=1.6732002879999999 podStartE2EDuration="5.451647225s" podCreationTimestamp="2026-02-16 13:18:08 +0000 UTC" firstStartedPulling="2026-02-16 13:18:08.861808605 +0000 UTC m=+888.188522333" lastFinishedPulling="2026-02-16 13:18:12.640255542 +0000 UTC m=+891.966969270" observedRunningTime="2026-02-16 13:18:13.451278075 +0000 UTC m=+892.777991813" watchObservedRunningTime="2026-02-16 13:18:13.451647225 +0000 UTC m=+892.778360953" Feb 16 13:18:17 crc kubenswrapper[4816]: I0216 13:18:17.150905 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" Feb 16 13:18:17 crc kubenswrapper[4816]: I0216 13:18:17.174281 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-6888856db4-p4ghc" podStartSLOduration=6.177730297 podStartE2EDuration="11.174247758s" podCreationTimestamp="2026-02-16 13:18:06 
+0000 UTC" firstStartedPulling="2026-02-16 13:18:07.62674758 +0000 UTC m=+886.953461308" lastFinishedPulling="2026-02-16 13:18:12.623265051 +0000 UTC m=+891.949978769" observedRunningTime="2026-02-16 13:18:13.641744958 +0000 UTC m=+892.968458686" watchObservedRunningTime="2026-02-16 13:18:17.174247758 +0000 UTC m=+896.500961536" Feb 16 13:18:24 crc kubenswrapper[4816]: I0216 13:18:24.986394 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-545d4d4674-8z8xq"] Feb 16 13:18:24 crc kubenswrapper[4816]: I0216 13:18:24.987782 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:24 crc kubenswrapper[4816]: I0216 13:18:24.991101 4816 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-gq2hp" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.005042 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-8z8xq"] Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.146686 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tj24\" (UniqueName: \"kubernetes.io/projected/5dea32e8-76ae-4e83-a2d1-49410b066382-kube-api-access-6tj24\") pod \"cert-manager-545d4d4674-8z8xq\" (UID: \"5dea32e8-76ae-4e83-a2d1-49410b066382\") " pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.146802 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5dea32e8-76ae-4e83-a2d1-49410b066382-bound-sa-token\") pod \"cert-manager-545d4d4674-8z8xq\" (UID: \"5dea32e8-76ae-4e83-a2d1-49410b066382\") " pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.247573 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tj24\" (UniqueName: \"kubernetes.io/projected/5dea32e8-76ae-4e83-a2d1-49410b066382-kube-api-access-6tj24\") pod \"cert-manager-545d4d4674-8z8xq\" (UID: \"5dea32e8-76ae-4e83-a2d1-49410b066382\") " pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.247636 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5dea32e8-76ae-4e83-a2d1-49410b066382-bound-sa-token\") pod \"cert-manager-545d4d4674-8z8xq\" (UID: \"5dea32e8-76ae-4e83-a2d1-49410b066382\") " pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.278404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5dea32e8-76ae-4e83-a2d1-49410b066382-bound-sa-token\") pod \"cert-manager-545d4d4674-8z8xq\" (UID: \"5dea32e8-76ae-4e83-a2d1-49410b066382\") " pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.278859 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tj24\" (UniqueName: \"kubernetes.io/projected/5dea32e8-76ae-4e83-a2d1-49410b066382-kube-api-access-6tj24\") pod \"cert-manager-545d4d4674-8z8xq\" (UID: \"5dea32e8-76ae-4e83-a2d1-49410b066382\") " pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.354994 4816 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-8z8xq" Feb 16 13:18:25 crc kubenswrapper[4816]: I0216 13:18:25.807042 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-8z8xq"] Feb 16 13:18:26 crc kubenswrapper[4816]: I0216 13:18:26.516003 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-8z8xq" event={"ID":"5dea32e8-76ae-4e83-a2d1-49410b066382","Type":"ContainerStarted","Data":"a9d59d02f1696280d7e905003b2c24fdc889c4d7024bcded9db8bca46876de35"} Feb 16 13:18:26 crc kubenswrapper[4816]: I0216 13:18:26.516327 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-8z8xq" event={"ID":"5dea32e8-76ae-4e83-a2d1-49410b066382","Type":"ContainerStarted","Data":"933996a1ef01907240b366fafac0085f4cd4f3c8824b8c93c9cc74556b42336f"} Feb 16 13:18:26 crc kubenswrapper[4816]: I0216 13:18:26.537252 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-545d4d4674-8z8xq" podStartSLOduration=2.537235615 podStartE2EDuration="2.537235615s" podCreationTimestamp="2026-02-16 13:18:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:18:26.533807732 +0000 UTC m=+905.860521460" watchObservedRunningTime="2026-02-16 13:18:26.537235615 +0000 UTC m=+905.863949343" Feb 16 13:18:29 crc kubenswrapper[4816]: I0216 13:18:29.821361 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-brvmn"] Feb 16 13:18:29 crc kubenswrapper[4816]: I0216 13:18:29.822706 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:29 crc kubenswrapper[4816]: I0216 13:18:29.836789 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-brvmn"] Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.014739 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-catalog-content\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.014790 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z92kc\" (UniqueName: \"kubernetes.io/projected/5150894a-f6fb-46c3-8619-a770290e63b8-kube-api-access-z92kc\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.014809 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-utilities\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.116396 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-catalog-content\") pod \"redhat-marketplace-brvmn\" (UID: 
\"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.116720 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z92kc\" (UniqueName: \"kubernetes.io/projected/5150894a-f6fb-46c3-8619-a770290e63b8-kube-api-access-z92kc\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.116829 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-utilities\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.117020 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-catalog-content\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.117528 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-utilities\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.138441 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z92kc\" (UniqueName: \"kubernetes.io/projected/5150894a-f6fb-46c3-8619-a770290e63b8-kube-api-access-z92kc\") pod \"redhat-marketplace-brvmn\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.142805 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brvmn"
Feb 16 13:18:30 crc kubenswrapper[4816]: I0216 13:18:30.574930 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-brvmn"]
Feb 16 13:18:30 crc kubenswrapper[4816]: W0216 13:18:30.585459 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5150894a_f6fb_46c3_8619_a770290e63b8.slice/crio-44660aa92b3cb9c5141462898d33ad1e8c79de407ea4f14d175c7924bfd5ad9b WatchSource:0}: Error finding container 44660aa92b3cb9c5141462898d33ad1e8c79de407ea4f14d175c7924bfd5ad9b: Status 404 returned error can't find the container with id 44660aa92b3cb9c5141462898d33ad1e8c79de407ea4f14d175c7924bfd5ad9b
Feb 16 13:18:31 crc kubenswrapper[4816]: I0216 13:18:31.556587 4816 generic.go:334] "Generic (PLEG): container finished" podID="5150894a-f6fb-46c3-8619-a770290e63b8" containerID="ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba" exitCode=0
Feb 16 13:18:31 crc kubenswrapper[4816]: I0216 13:18:31.556682 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brvmn" event={"ID":"5150894a-f6fb-46c3-8619-a770290e63b8","Type":"ContainerDied","Data":"ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba"}
Feb 16 13:18:31 crc kubenswrapper[4816]: I0216 13:18:31.557009 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brvmn" event={"ID":"5150894a-f6fb-46c3-8619-a770290e63b8","Type":"ContainerStarted","Data":"44660aa92b3cb9c5141462898d33ad1e8c79de407ea4f14d175c7924bfd5ad9b"}
Feb 16 13:18:32 crc kubenswrapper[4816]: I0216 13:18:32.564117 4816 generic.go:334] "Generic (PLEG): container finished" podID="5150894a-f6fb-46c3-8619-a770290e63b8" containerID="2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821" exitCode=0
Feb 16 13:18:32 crc kubenswrapper[4816]: I0216 13:18:32.564232 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brvmn" event={"ID":"5150894a-f6fb-46c3-8619-a770290e63b8","Type":"ContainerDied","Data":"2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821"}
Feb 16 13:18:33 crc kubenswrapper[4816]: I0216 13:18:33.571813 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brvmn" event={"ID":"5150894a-f6fb-46c3-8619-a770290e63b8","Type":"ContainerStarted","Data":"9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b"}
Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.007537 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-brvmn" podStartSLOduration=3.601797657 podStartE2EDuration="5.007505342s" podCreationTimestamp="2026-02-16 13:18:29 +0000 UTC" firstStartedPulling="2026-02-16 13:18:31.558860811 +0000 UTC m=+910.885574569" lastFinishedPulling="2026-02-16 13:18:32.964568526 +0000 UTC m=+912.291282254" observedRunningTime="2026-02-16 13:18:33.601205559 +0000 UTC m=+912.927919277" watchObservedRunningTime="2026-02-16 13:18:34.007505342 +0000 UTC m=+913.334219110"
Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.011091 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-h6rdl"]
Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.012050 4816 util.go:30] "No sandbox for pod can be found.
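The redhat-marketplace-brvmn sequence above is the usual catalog-pod bootstrap: two short-lived steps (extract-utilities, extract-content) each exit 0 and surface as ContainerDied events, then the long-running registry-server starts. The events themselves come from PLEG relisting container states and diffing against the previous snapshot; a compressed model of that diff, with invented types:

package main

import "fmt"

type containerState int

const (
	running containerState = iota
	exited
)

// pleg compares the previous and current relist snapshots (keyed by
// container ID) and emits the "Generic (PLEG): container finished" /
// SyncLoop events seen above. Toy version only.
func pleg(old, curr map[string]containerState) {
	for id, state := range curr {
		prev, seen := old[id]
		switch {
		case !seen && state == running:
			fmt.Println("ContainerStarted", id)
		case seen && prev == running && state == exited:
			fmt.Println("container finished exitCode=0; ContainerDied", id)
		}
	}
}

func main() {
	old := map[string]containerState{"ee58b2f4": running}
	curr := map[string]containerState{"ee58b2f4": exited, "9c946b17": running}
	pleg(old, curr)
}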
Need to start a new one" pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.014561 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.014894 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.015126 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-5t42c" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.022998 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-h6rdl"] Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.165366 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2t5f\" (UniqueName: \"kubernetes.io/projected/dc1bb7d3-5870-411d-81f5-3aa5ef055420-kube-api-access-l2t5f\") pod \"openstack-operator-index-h6rdl\" (UID: \"dc1bb7d3-5870-411d-81f5-3aa5ef055420\") " pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.267385 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2t5f\" (UniqueName: \"kubernetes.io/projected/dc1bb7d3-5870-411d-81f5-3aa5ef055420-kube-api-access-l2t5f\") pod \"openstack-operator-index-h6rdl\" (UID: \"dc1bb7d3-5870-411d-81f5-3aa5ef055420\") " pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.289367 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2t5f\" (UniqueName: \"kubernetes.io/projected/dc1bb7d3-5870-411d-81f5-3aa5ef055420-kube-api-access-l2t5f\") pod \"openstack-operator-index-h6rdl\" (UID: \"dc1bb7d3-5870-411d-81f5-3aa5ef055420\") " pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.343686 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:34 crc kubenswrapper[4816]: I0216 13:18:34.756491 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-h6rdl"] Feb 16 13:18:34 crc kubenswrapper[4816]: W0216 13:18:34.767513 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc1bb7d3_5870_411d_81f5_3aa5ef055420.slice/crio-7b0cb473f61a0ba61713752b90e270419fefeca8346db6da9d8c472ab99c073d WatchSource:0}: Error finding container 7b0cb473f61a0ba61713752b90e270419fefeca8346db6da9d8c472ab99c073d: Status 404 returned error can't find the container with id 7b0cb473f61a0ba61713752b90e270419fefeca8346db6da9d8c472ab99c073d Feb 16 13:18:35 crc kubenswrapper[4816]: I0216 13:18:35.612354 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h6rdl" event={"ID":"dc1bb7d3-5870-411d-81f5-3aa5ef055420","Type":"ContainerStarted","Data":"7b0cb473f61a0ba61713752b90e270419fefeca8346db6da9d8c472ab99c073d"} Feb 16 13:18:37 crc kubenswrapper[4816]: I0216 13:18:37.627863 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h6rdl" event={"ID":"dc1bb7d3-5870-411d-81f5-3aa5ef055420","Type":"ContainerStarted","Data":"866b4ede75f99f81ef3c144939d00234f28314852d95da73dd81d3c89f86e15a"} Feb 16 13:18:37 crc kubenswrapper[4816]: I0216 13:18:37.661346 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-h6rdl" podStartSLOduration=2.545821741 podStartE2EDuration="4.661327157s" podCreationTimestamp="2026-02-16 13:18:33 +0000 UTC" firstStartedPulling="2026-02-16 13:18:34.769547833 +0000 UTC m=+914.096261581" lastFinishedPulling="2026-02-16 13:18:36.885053269 +0000 UTC m=+916.211766997" observedRunningTime="2026-02-16 13:18:37.65997647 +0000 UTC m=+916.986690218" watchObservedRunningTime="2026-02-16 13:18:37.661327157 +0000 UTC m=+916.988040905" Feb 16 13:18:40 crc kubenswrapper[4816]: I0216 13:18:40.143087 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:40 crc kubenswrapper[4816]: I0216 13:18:40.143445 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:40 crc kubenswrapper[4816]: I0216 13:18:40.189546 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:40 crc kubenswrapper[4816]: I0216 13:18:40.716884 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.807523 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b2x65"] Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.808998 4816 util.go:30] "No sandbox for pod can be found. 
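The openstack-operator-index entries above show the startup-probe handshake: startup first reports unhealthy, readiness is logged with an empty status, and only after startup flips to started does a real readiness result (ready) appear. A sketch of that gating with illustrative names; kubelet's actual sequencing is more involved than this:

package main

import "fmt"

// probeGate suppresses readiness results until the startup probe has
// succeeded once, mimicking the status="unhealthy" -> status="started"
// -> status="ready" progression in the log above.
type probeGate struct{ started bool }

func (g *probeGate) observe(probe string, healthy bool) string {
	switch probe {
	case "startup":
		if healthy {
			g.started = true
			return "started"
		}
		return "unhealthy"
	default:
		if !g.started {
			return "" // not evaluated yet, logged as status=""
		}
		if healthy {
			return "ready"
		}
		return "not ready"
	}
}

func main() {
	g := &probeGate{}
	fmt.Println(g.observe("startup", false))  // unhealthy
	fmt.Println(g.observe("readiness", true)) // empty: still gated
	fmt.Println(g.observe("startup", true))   // started
	fmt.Println(g.observe("readiness", true)) // ready
}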
Need to start a new one" pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.840005 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b2x65"] Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.888581 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bf1400-800d-4ec5-ad7d-af42faede5b4-catalog-content\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.889154 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bf1400-800d-4ec5-ad7d-af42faede5b4-utilities\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.889400 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5zbx\" (UniqueName: \"kubernetes.io/projected/13bf1400-800d-4ec5-ad7d-af42faede5b4-kube-api-access-l5zbx\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.991584 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bf1400-800d-4ec5-ad7d-af42faede5b4-catalog-content\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.991696 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bf1400-800d-4ec5-ad7d-af42faede5b4-utilities\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.991727 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5zbx\" (UniqueName: \"kubernetes.io/projected/13bf1400-800d-4ec5-ad7d-af42faede5b4-kube-api-access-l5zbx\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.992795 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bf1400-800d-4ec5-ad7d-af42faede5b4-catalog-content\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:42 crc kubenswrapper[4816]: I0216 13:18:42.992944 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bf1400-800d-4ec5-ad7d-af42faede5b4-utilities\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:43 crc kubenswrapper[4816]: I0216 13:18:43.041325 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l5zbx\" (UniqueName: \"kubernetes.io/projected/13bf1400-800d-4ec5-ad7d-af42faede5b4-kube-api-access-l5zbx\") pod \"community-operators-b2x65\" (UID: \"13bf1400-800d-4ec5-ad7d-af42faede5b4\") " pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:43 crc kubenswrapper[4816]: I0216 13:18:43.150005 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:43 crc kubenswrapper[4816]: I0216 13:18:43.558320 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b2x65"] Feb 16 13:18:43 crc kubenswrapper[4816]: I0216 13:18:43.666878 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2x65" event={"ID":"13bf1400-800d-4ec5-ad7d-af42faede5b4","Type":"ContainerStarted","Data":"4e0e6d057e0d1da2d4ee9e83ac014e9857b6e0203a36663d38d8dd2005ee065a"} Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.344865 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.345256 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.387571 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.392434 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-brvmn"] Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.392722 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-brvmn" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="registry-server" containerID="cri-o://9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b" gracePeriod=2 Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.675267 4816 generic.go:334] "Generic (PLEG): container finished" podID="13bf1400-800d-4ec5-ad7d-af42faede5b4" containerID="f62df7c28eb59ac2e653b72fd6487758bd7dd039afd88b3b49e99af344e61f66" exitCode=0 Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.676579 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2x65" event={"ID":"13bf1400-800d-4ec5-ad7d-af42faede5b4","Type":"ContainerDied","Data":"f62df7c28eb59ac2e653b72fd6487758bd7dd039afd88b3b49e99af344e61f66"} Feb 16 13:18:44 crc kubenswrapper[4816]: I0216 13:18:44.712291 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-h6rdl" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.446937 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.528519 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-catalog-content\") pod \"5150894a-f6fb-46c3-8619-a770290e63b8\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.528897 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-utilities\") pod \"5150894a-f6fb-46c3-8619-a770290e63b8\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.529079 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z92kc\" (UniqueName: \"kubernetes.io/projected/5150894a-f6fb-46c3-8619-a770290e63b8-kube-api-access-z92kc\") pod \"5150894a-f6fb-46c3-8619-a770290e63b8\" (UID: \"5150894a-f6fb-46c3-8619-a770290e63b8\") " Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.530341 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-utilities" (OuterVolumeSpecName: "utilities") pod "5150894a-f6fb-46c3-8619-a770290e63b8" (UID: "5150894a-f6fb-46c3-8619-a770290e63b8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.533992 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5150894a-f6fb-46c3-8619-a770290e63b8-kube-api-access-z92kc" (OuterVolumeSpecName: "kube-api-access-z92kc") pod "5150894a-f6fb-46c3-8619-a770290e63b8" (UID: "5150894a-f6fb-46c3-8619-a770290e63b8"). InnerVolumeSpecName "kube-api-access-z92kc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.551442 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5150894a-f6fb-46c3-8619-a770290e63b8" (UID: "5150894a-f6fb-46c3-8619-a770290e63b8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.632032 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z92kc\" (UniqueName: \"kubernetes.io/projected/5150894a-f6fb-46c3-8619-a770290e63b8-kube-api-access-z92kc\") on node \"crc\" DevicePath \"\"" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.632096 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.632136 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5150894a-f6fb-46c3-8619-a770290e63b8-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.683337 4816 generic.go:334] "Generic (PLEG): container finished" podID="5150894a-f6fb-46c3-8619-a770290e63b8" containerID="9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b" exitCode=0 Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.683393 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-brvmn" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.683460 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brvmn" event={"ID":"5150894a-f6fb-46c3-8619-a770290e63b8","Type":"ContainerDied","Data":"9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b"} Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.683531 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-brvmn" event={"ID":"5150894a-f6fb-46c3-8619-a770290e63b8","Type":"ContainerDied","Data":"44660aa92b3cb9c5141462898d33ad1e8c79de407ea4f14d175c7924bfd5ad9b"} Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.683554 4816 scope.go:117] "RemoveContainer" containerID="9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.706292 4816 scope.go:117] "RemoveContainer" containerID="2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.721887 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-brvmn"] Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.727463 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-brvmn"] Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.744145 4816 scope.go:117] "RemoveContainer" containerID="ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.770447 4816 scope.go:117] "RemoveContainer" containerID="9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b" Feb 16 13:18:45 crc kubenswrapper[4816]: E0216 13:18:45.771018 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b\": container with ID starting with 9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b not found: ID does not exist" containerID="9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.771062 4816 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b"} err="failed to get container status \"9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b\": rpc error: code = NotFound desc = could not find container \"9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b\": container with ID starting with 9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b not found: ID does not exist" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.771089 4816 scope.go:117] "RemoveContainer" containerID="2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821" Feb 16 13:18:45 crc kubenswrapper[4816]: E0216 13:18:45.771408 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821\": container with ID starting with 2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821 not found: ID does not exist" containerID="2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.771461 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821"} err="failed to get container status \"2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821\": rpc error: code = NotFound desc = could not find container \"2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821\": container with ID starting with 2d1a3e3ae18d2483af999b25e35e62ce8c0fd0636a1a89b10a77df641ae31821 not found: ID does not exist" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.771481 4816 scope.go:117] "RemoveContainer" containerID="ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba" Feb 16 13:18:45 crc kubenswrapper[4816]: E0216 13:18:45.771803 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba\": container with ID starting with ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba not found: ID does not exist" containerID="ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba" Feb 16 13:18:45 crc kubenswrapper[4816]: I0216 13:18:45.771896 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba"} err="failed to get container status \"ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba\": rpc error: code = NotFound desc = could not find container \"ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba\": container with ID starting with ee58b2f4dcc7ad2516747a1fcfec9a8eee520dc48f41aae7a5f7bb40aa3939ba not found: ID does not exist" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.411379 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" path="/var/lib/kubelet/pods/5150894a-f6fb-46c3-8619-a770290e63b8/volumes" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.829821 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns"] Feb 16 13:18:47 crc kubenswrapper[4816]: E0216 13:18:47.830164 4816 cpu_manager.go:410] 
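The RemoveContainer / "ContainerStatus from runtime service failed" pairs above are a benign race rather than a real failure: the containers were already removed, so the follow-up status lookup gets a gRPC NotFound from CRI-O and the deletor merely logs it. A minimal sketch of the idempotent-delete pattern, with removeContainer standing in for a real CRI client call (not kubelet's actual code):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer simulates a CRI call against an already-removed ID;
// it always reports NotFound, as in the log entries above.
func removeContainer(id string) error {
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func main() {
	id := "9c946b17e7c61460b48d5ef3a645faa6c99bf754763e3405645de9239f86068b"
	if err := removeContainer(id); status.Code(err) == codes.NotFound {
		// Already gone: the delete is effectively done, so log and move on.
		fmt.Println("container already removed:", err)
	}
}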
"RemoveStaleState: removing container" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="registry-server" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.830188 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="registry-server" Feb 16 13:18:47 crc kubenswrapper[4816]: E0216 13:18:47.830210 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="extract-content" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.830219 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="extract-content" Feb 16 13:18:47 crc kubenswrapper[4816]: E0216 13:18:47.830228 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="extract-utilities" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.830236 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="extract-utilities" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.830376 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5150894a-f6fb-46c3-8619-a770290e63b8" containerName="registry-server" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.831372 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.836776 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-sc92l" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.837812 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns"] Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.968244 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njbpl\" (UniqueName: \"kubernetes.io/projected/cf420361-538f-4de6-8e0b-09bda2ae4d4c-kube-api-access-njbpl\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.968549 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-bundle\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:47 crc kubenswrapper[4816]: I0216 13:18:47.968585 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-util\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:48 crc kubenswrapper[4816]: I0216 13:18:48.070031 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-util\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:48 crc kubenswrapper[4816]: I0216 13:18:48.070160 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njbpl\" (UniqueName: \"kubernetes.io/projected/cf420361-538f-4de6-8e0b-09bda2ae4d4c-kube-api-access-njbpl\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:48 crc kubenswrapper[4816]: I0216 13:18:48.070184 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-bundle\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:48 crc kubenswrapper[4816]: I0216 13:18:48.070696 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-bundle\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:48 crc kubenswrapper[4816]: I0216 13:18:48.070845 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-util\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:48 crc kubenswrapper[4816]: I0216 13:18:48.089452 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njbpl\" (UniqueName: \"kubernetes.io/projected/cf420361-538f-4de6-8e0b-09bda2ae4d4c-kube-api-access-njbpl\") pod \"839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:48 crc kubenswrapper[4816]: I0216 13:18:48.149992 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:49 crc kubenswrapper[4816]: I0216 13:18:49.183605 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns"] Feb 16 13:18:49 crc kubenswrapper[4816]: W0216 13:18:49.200841 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf420361_538f_4de6_8e0b_09bda2ae4d4c.slice/crio-e5c512dd59f98741c8a373d0c6e186a1181d7a43409620496630a8ae8ae4ef2c WatchSource:0}: Error finding container e5c512dd59f98741c8a373d0c6e186a1181d7a43409620496630a8ae8ae4ef2c: Status 404 returned error can't find the container with id e5c512dd59f98741c8a373d0c6e186a1181d7a43409620496630a8ae8ae4ef2c Feb 16 13:18:49 crc kubenswrapper[4816]: I0216 13:18:49.715173 4816 generic.go:334] "Generic (PLEG): container finished" podID="13bf1400-800d-4ec5-ad7d-af42faede5b4" containerID="fa9a983cac0cc98a0ce96cd56f0783a4d06a5c6912ee0a43cd2df86b765c0e3c" exitCode=0 Feb 16 13:18:49 crc kubenswrapper[4816]: I0216 13:18:49.715247 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2x65" event={"ID":"13bf1400-800d-4ec5-ad7d-af42faede5b4","Type":"ContainerDied","Data":"fa9a983cac0cc98a0ce96cd56f0783a4d06a5c6912ee0a43cd2df86b765c0e3c"} Feb 16 13:18:49 crc kubenswrapper[4816]: I0216 13:18:49.717423 4816 generic.go:334] "Generic (PLEG): container finished" podID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerID="e8b9c05952b933acb23f8e0df8969913fa443ea13f651c91f2a394a390b9827c" exitCode=0 Feb 16 13:18:49 crc kubenswrapper[4816]: I0216 13:18:49.717467 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" event={"ID":"cf420361-538f-4de6-8e0b-09bda2ae4d4c","Type":"ContainerDied","Data":"e8b9c05952b933acb23f8e0df8969913fa443ea13f651c91f2a394a390b9827c"} Feb 16 13:18:49 crc kubenswrapper[4816]: I0216 13:18:49.717495 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" event={"ID":"cf420361-538f-4de6-8e0b-09bda2ae4d4c","Type":"ContainerStarted","Data":"e5c512dd59f98741c8a373d0c6e186a1181d7a43409620496630a8ae8ae4ef2c"} Feb 16 13:18:50 crc kubenswrapper[4816]: I0216 13:18:50.733625 4816 generic.go:334] "Generic (PLEG): container finished" podID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerID="0d0509eebc7764b9a8dde6c3aaa29e81eca573b50ce0b1c79f004decb79e4080" exitCode=0 Feb 16 13:18:50 crc kubenswrapper[4816]: I0216 13:18:50.734676 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" event={"ID":"cf420361-538f-4de6-8e0b-09bda2ae4d4c","Type":"ContainerDied","Data":"0d0509eebc7764b9a8dde6c3aaa29e81eca573b50ce0b1c79f004decb79e4080"} Feb 16 13:18:50 crc kubenswrapper[4816]: I0216 13:18:50.737574 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b2x65" event={"ID":"13bf1400-800d-4ec5-ad7d-af42faede5b4","Type":"ContainerStarted","Data":"b79fe65cafab12d1af8d148125814af730aa067603d13f4f412d93eb54e8b00a"} Feb 16 13:18:50 crc kubenswrapper[4816]: I0216 13:18:50.779550 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b2x65" 
podStartSLOduration=3.326825313 podStartE2EDuration="8.779529409s" podCreationTimestamp="2026-02-16 13:18:42 +0000 UTC" firstStartedPulling="2026-02-16 13:18:44.67731867 +0000 UTC m=+924.004032408" lastFinishedPulling="2026-02-16 13:18:50.130022776 +0000 UTC m=+929.456736504" observedRunningTime="2026-02-16 13:18:50.776008763 +0000 UTC m=+930.102722511" watchObservedRunningTime="2026-02-16 13:18:50.779529409 +0000 UTC m=+930.106243137" Feb 16 13:18:51 crc kubenswrapper[4816]: I0216 13:18:51.743871 4816 generic.go:334] "Generic (PLEG): container finished" podID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerID="c074745dc4007126f40af57434b4002712a6173fb20e2afba4b4081d5acb2d8e" exitCode=0 Feb 16 13:18:51 crc kubenswrapper[4816]: I0216 13:18:51.743940 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" event={"ID":"cf420361-538f-4de6-8e0b-09bda2ae4d4c","Type":"ContainerDied","Data":"c074745dc4007126f40af57434b4002712a6173fb20e2afba4b4081d5acb2d8e"} Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.077022 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.150906 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.150956 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.196384 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.243685 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-util\") pod \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.243746 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njbpl\" (UniqueName: \"kubernetes.io/projected/cf420361-538f-4de6-8e0b-09bda2ae4d4c-kube-api-access-njbpl\") pod \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.243807 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-bundle\") pod \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\" (UID: \"cf420361-538f-4de6-8e0b-09bda2ae4d4c\") " Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.247109 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-bundle" (OuterVolumeSpecName: "bundle") pod "cf420361-538f-4de6-8e0b-09bda2ae4d4c" (UID: "cf420361-538f-4de6-8e0b-09bda2ae4d4c"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.251383 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf420361-538f-4de6-8e0b-09bda2ae4d4c-kube-api-access-njbpl" (OuterVolumeSpecName: "kube-api-access-njbpl") pod "cf420361-538f-4de6-8e0b-09bda2ae4d4c" (UID: "cf420361-538f-4de6-8e0b-09bda2ae4d4c"). InnerVolumeSpecName "kube-api-access-njbpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.263252 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-util" (OuterVolumeSpecName: "util") pod "cf420361-538f-4de6-8e0b-09bda2ae4d4c" (UID: "cf420361-538f-4de6-8e0b-09bda2ae4d4c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.352484 4816 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-util\") on node \"crc\" DevicePath \"\"" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.352532 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njbpl\" (UniqueName: \"kubernetes.io/projected/cf420361-538f-4de6-8e0b-09bda2ae4d4c-kube-api-access-njbpl\") on node \"crc\" DevicePath \"\"" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.352566 4816 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cf420361-538f-4de6-8e0b-09bda2ae4d4c-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.762090 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" event={"ID":"cf420361-538f-4de6-8e0b-09bda2ae4d4c","Type":"ContainerDied","Data":"e5c512dd59f98741c8a373d0c6e186a1181d7a43409620496630a8ae8ae4ef2c"} Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.762150 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5c512dd59f98741c8a373d0c6e186a1181d7a43409620496630a8ae8ae4ef2c" Feb 16 13:18:53 crc kubenswrapper[4816]: I0216 13:18:53.762120 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.512787 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2"] Feb 16 13:18:57 crc kubenswrapper[4816]: E0216 13:18:57.513695 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerName="extract" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.513716 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerName="extract" Feb 16 13:18:57 crc kubenswrapper[4816]: E0216 13:18:57.513735 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerName="pull" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.513746 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerName="pull" Feb 16 13:18:57 crc kubenswrapper[4816]: E0216 13:18:57.513761 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerName="util" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.513769 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerName="util" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.513927 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf420361-538f-4de6-8e0b-09bda2ae4d4c" containerName="extract" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.514799 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.517057 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-fp57z" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.537165 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2"] Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.610738 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v9jw\" (UniqueName: \"kubernetes.io/projected/1633ec9f-6f03-436b-a464-386fefc9ea22-kube-api-access-6v9jw\") pod \"openstack-operator-controller-init-7f746469c7-2pqn2\" (UID: \"1633ec9f-6f03-436b-a464-386fefc9ea22\") " pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.712508 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v9jw\" (UniqueName: \"kubernetes.io/projected/1633ec9f-6f03-436b-a464-386fefc9ea22-kube-api-access-6v9jw\") pod \"openstack-operator-controller-init-7f746469c7-2pqn2\" (UID: \"1633ec9f-6f03-436b-a464-386fefc9ea22\") " pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.731079 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v9jw\" (UniqueName: \"kubernetes.io/projected/1633ec9f-6f03-436b-a464-386fefc9ea22-kube-api-access-6v9jw\") pod \"openstack-operator-controller-init-7f746469c7-2pqn2\" (UID: 
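The cpu_manager/memory_manager RemoveStaleState entries above fire when a new pod is admitted and the resource managers notice in-memory assignments for pods that no longer exist (here the deleted bundle pod's extract/pull/util containers). An illustrative reconcile loop capturing that pattern, not kubelet's actual implementation:

package main

import "fmt"

func main() {
	// podUID -> container names still present in the manager's state.
	// UIDs and the extract/pull/util names are taken from the entries above;
	// the live pod's container name is a placeholder.
	assignments := map[string][]string{
		"cf420361-538f-4de6-8e0b-09bda2ae4d4c": {"extract", "pull", "util"},
		"1633ec9f-6f03-436b-a464-386fefc9ea22": {"manager"},
	}
	livePods := map[string]bool{"1633ec9f-6f03-436b-a464-386fefc9ea22": true}

	for podUID, containers := range assignments {
		if livePods[podUID] {
			continue
		}
		for _, name := range containers {
			fmt.Printf("Deleted CPUSet assignment podUID=%q containerName=%q\n", podUID, name)
		}
		delete(assignments, podUID) // deleting during range is safe in Go
	}
}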
\"1633ec9f-6f03-436b-a464-386fefc9ea22\") " pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" Feb 16 13:18:57 crc kubenswrapper[4816]: I0216 13:18:57.835446 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" Feb 16 13:18:58 crc kubenswrapper[4816]: I0216 13:18:58.254112 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2"] Feb 16 13:18:58 crc kubenswrapper[4816]: W0216 13:18:58.254438 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1633ec9f_6f03_436b_a464_386fefc9ea22.slice/crio-5908b02b22061d9843b8d0a5d03942a916c60a622aca051663fdd78104f4d1f5 WatchSource:0}: Error finding container 5908b02b22061d9843b8d0a5d03942a916c60a622aca051663fdd78104f4d1f5: Status 404 returned error can't find the container with id 5908b02b22061d9843b8d0a5d03942a916c60a622aca051663fdd78104f4d1f5 Feb 16 13:18:58 crc kubenswrapper[4816]: I0216 13:18:58.798189 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" event={"ID":"1633ec9f-6f03-436b-a464-386fefc9ea22","Type":"ContainerStarted","Data":"5908b02b22061d9843b8d0a5d03942a916c60a622aca051663fdd78104f4d1f5"} Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.409620 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rtwl8"] Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.412355 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.422003 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rtwl8"] Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.600098 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-utilities\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.600177 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7zwk\" (UniqueName: \"kubernetes.io/projected/c433fb3f-2c3d-438a-bd2b-b43789096da0-kube-api-access-h7zwk\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.600204 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-catalog-content\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.702215 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-utilities\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " 
pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.702463 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7zwk\" (UniqueName: \"kubernetes.io/projected/c433fb3f-2c3d-438a-bd2b-b43789096da0-kube-api-access-h7zwk\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.702484 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-catalog-content\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.703003 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-utilities\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.703195 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-catalog-content\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.724963 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7zwk\" (UniqueName: \"kubernetes.io/projected/c433fb3f-2c3d-438a-bd2b-b43789096da0-kube-api-access-h7zwk\") pod \"certified-operators-rtwl8\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.772431 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.828143 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" event={"ID":"1633ec9f-6f03-436b-a464-386fefc9ea22","Type":"ContainerStarted","Data":"b536dece202eea22565b2e00ad2ff0887c0e0cee66722301fb53d60a44e783d8"} Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.829205 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" Feb 16 13:19:02 crc kubenswrapper[4816]: I0216 13:19:02.871301 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" podStartSLOduration=1.501781577 podStartE2EDuration="5.871282313s" podCreationTimestamp="2026-02-16 13:18:57 +0000 UTC" firstStartedPulling="2026-02-16 13:18:58.256498981 +0000 UTC m=+937.583212709" lastFinishedPulling="2026-02-16 13:19:02.625999717 +0000 UTC m=+941.952713445" observedRunningTime="2026-02-16 13:19:02.866293808 +0000 UTC m=+942.193007536" watchObservedRunningTime="2026-02-16 13:19:02.871282313 +0000 UTC m=+942.197996041" Feb 16 13:19:03 crc kubenswrapper[4816]: I0216 13:19:03.205947 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b2x65" Feb 16 13:19:03 crc kubenswrapper[4816]: I0216 13:19:03.216010 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rtwl8"] Feb 16 13:19:03 crc kubenswrapper[4816]: W0216 13:19:03.220822 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc433fb3f_2c3d_438a_bd2b_b43789096da0.slice/crio-c10e06623d1597f458e2b9fc2d4c0380833fdf7b04b7ccc8eab5c69baefdd0c5 WatchSource:0}: Error finding container c10e06623d1597f458e2b9fc2d4c0380833fdf7b04b7ccc8eab5c69baefdd0c5: Status 404 returned error can't find the container with id c10e06623d1597f458e2b9fc2d4c0380833fdf7b04b7ccc8eab5c69baefdd0c5 Feb 16 13:19:03 crc kubenswrapper[4816]: E0216 13:19:03.504416 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc433fb3f_2c3d_438a_bd2b_b43789096da0.slice/crio-conmon-6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc433fb3f_2c3d_438a_bd2b_b43789096da0.slice/crio-6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db.scope\": RecentStats: unable to find data in memory cache]" Feb 16 13:19:03 crc kubenswrapper[4816]: I0216 13:19:03.835958 4816 generic.go:334] "Generic (PLEG): container finished" podID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerID="6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db" exitCode=0 Feb 16 13:19:03 crc kubenswrapper[4816]: I0216 13:19:03.837081 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rtwl8" event={"ID":"c433fb3f-2c3d-438a-bd2b-b43789096da0","Type":"ContainerDied","Data":"6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db"} Feb 16 13:19:03 crc kubenswrapper[4816]: I0216 13:19:03.837141 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
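The "Partial failure issuing cadvisor.ContainerInfoV2" error above is typically transient rather than a fault: the crio-conmon-... and crio-... cgroup scopes for container 6c8cedb6... had just been created, and cadvisor's housekeeping had not yet cached any samples for them, hence "RecentStats: unable to find data in memory cache". A consumer of such stats can treat that message as retryable; an illustrative backoff sketch (our pattern, not kubelet code):

package main

import (
	"errors"
	"fmt"
	"strings"
	"time"
)

// readStats simulates a stats query that only succeeds once housekeeping
// has cached a sample (here, from the third attempt onward).
func readStats(attempt int) error {
	if attempt < 2 {
		return errors.New("RecentStats: unable to find data in memory cache")
	}
	return nil
}

func main() {
	for attempt := 0; ; attempt++ {
		err := readStats(attempt)
		if err == nil {
			fmt.Println("stats available on attempt", attempt)
			return
		}
		if !strings.Contains(err.Error(), "unable to find data in memory cache") {
			fmt.Println("permanent failure:", err)
			return
		}
		time.Sleep(100 * time.Millisecond) // real housekeeping intervals are larger
	}
}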
pod="openshift-marketplace/certified-operators-rtwl8" event={"ID":"c433fb3f-2c3d-438a-bd2b-b43789096da0","Type":"ContainerStarted","Data":"c10e06623d1597f458e2b9fc2d4c0380833fdf7b04b7ccc8eab5c69baefdd0c5"} Feb 16 13:19:06 crc kubenswrapper[4816]: I0216 13:19:06.238491 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b2x65"] Feb 16 13:19:06 crc kubenswrapper[4816]: I0216 13:19:06.798238 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-krkwb"] Feb 16 13:19:06 crc kubenswrapper[4816]: I0216 13:19:06.798764 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-krkwb" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerName="registry-server" containerID="cri-o://c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8" gracePeriod=2 Feb 16 13:19:06 crc kubenswrapper[4816]: I0216 13:19:06.855428 4816 generic.go:334] "Generic (PLEG): container finished" podID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerID="f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a" exitCode=0 Feb 16 13:19:06 crc kubenswrapper[4816]: I0216 13:19:06.855478 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rtwl8" event={"ID":"c433fb3f-2c3d-438a-bd2b-b43789096da0","Type":"ContainerDied","Data":"f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a"} Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.139416 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.302355 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-utilities\") pod \"d2519664-9d4a-43d0-847e-ffdb49a03d02\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.302445 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-catalog-content\") pod \"d2519664-9d4a-43d0-847e-ffdb49a03d02\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.302498 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwgvk\" (UniqueName: \"kubernetes.io/projected/d2519664-9d4a-43d0-847e-ffdb49a03d02-kube-api-access-gwgvk\") pod \"d2519664-9d4a-43d0-847e-ffdb49a03d02\" (UID: \"d2519664-9d4a-43d0-847e-ffdb49a03d02\") " Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.304282 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-utilities" (OuterVolumeSpecName: "utilities") pod "d2519664-9d4a-43d0-847e-ffdb49a03d02" (UID: "d2519664-9d4a-43d0-847e-ffdb49a03d02"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.317329 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2519664-9d4a-43d0-847e-ffdb49a03d02-kube-api-access-gwgvk" (OuterVolumeSpecName: "kube-api-access-gwgvk") pod "d2519664-9d4a-43d0-847e-ffdb49a03d02" (UID: "d2519664-9d4a-43d0-847e-ffdb49a03d02"). InnerVolumeSpecName "kube-api-access-gwgvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.361525 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2519664-9d4a-43d0-847e-ffdb49a03d02" (UID: "d2519664-9d4a-43d0-847e-ffdb49a03d02"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.406065 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwgvk\" (UniqueName: \"kubernetes.io/projected/d2519664-9d4a-43d0-847e-ffdb49a03d02-kube-api-access-gwgvk\") on node \"crc\" DevicePath \"\"" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.406090 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.406099 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2519664-9d4a-43d0-847e-ffdb49a03d02-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.838212 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-7f746469c7-2pqn2" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.864532 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rtwl8" event={"ID":"c433fb3f-2c3d-438a-bd2b-b43789096da0","Type":"ContainerStarted","Data":"68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08"} Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.867238 4816 generic.go:334] "Generic (PLEG): container finished" podID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerID="c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8" exitCode=0 Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.867298 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-krkwb" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.867317 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkwb" event={"ID":"d2519664-9d4a-43d0-847e-ffdb49a03d02","Type":"ContainerDied","Data":"c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8"} Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.867687 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-krkwb" event={"ID":"d2519664-9d4a-43d0-847e-ffdb49a03d02","Type":"ContainerDied","Data":"0715673ea8133207bac679caaf2490360f37f4eced02e377811ab167b0be4e1d"} Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.867710 4816 scope.go:117] "RemoveContainer" containerID="c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.890586 4816 scope.go:117] "RemoveContainer" containerID="7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.910680 4816 scope.go:117] "RemoveContainer" containerID="1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.917892 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rtwl8" podStartSLOduration=2.487756846 podStartE2EDuration="5.917872581s" podCreationTimestamp="2026-02-16 13:19:02 +0000 UTC" firstStartedPulling="2026-02-16 13:19:03.838390578 +0000 UTC m=+943.165104306" lastFinishedPulling="2026-02-16 13:19:07.268506313 +0000 UTC m=+946.595220041" observedRunningTime="2026-02-16 13:19:07.917128401 +0000 UTC m=+947.243842159" watchObservedRunningTime="2026-02-16 13:19:07.917872581 +0000 UTC m=+947.244586309" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.942027 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-krkwb"] Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.942297 4816 scope.go:117] "RemoveContainer" containerID="c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.948776 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-krkwb"] Feb 16 13:19:07 crc kubenswrapper[4816]: E0216 13:19:07.949233 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8\": container with ID starting with c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8 not found: ID does not exist" containerID="c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.949272 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8"} err="failed to get container status \"c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8\": rpc error: code = NotFound desc = could not find container \"c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8\": container with ID starting with c208d30db2ccbcd3f423d7db3559ee24ae68eac9a39e67892f952d87269d33d8 not found: ID does not exist" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.949300 4816 scope.go:117] 
"RemoveContainer" containerID="7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab" Feb 16 13:19:07 crc kubenswrapper[4816]: E0216 13:19:07.949723 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab\": container with ID starting with 7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab not found: ID does not exist" containerID="7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.949756 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab"} err="failed to get container status \"7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab\": rpc error: code = NotFound desc = could not find container \"7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab\": container with ID starting with 7403bd67e9742d33e1f13e281ef15772e150e3b28209a1d9ccc236fa53e33fab not found: ID does not exist" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.949776 4816 scope.go:117] "RemoveContainer" containerID="1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a" Feb 16 13:19:07 crc kubenswrapper[4816]: E0216 13:19:07.950065 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a\": container with ID starting with 1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a not found: ID does not exist" containerID="1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a" Feb 16 13:19:07 crc kubenswrapper[4816]: I0216 13:19:07.950096 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a"} err="failed to get container status \"1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a\": rpc error: code = NotFound desc = could not find container \"1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a\": container with ID starting with 1a35b00f60ec406ea5b0f6047180a4d95b7099b4e1096081bf47d3b143bc9a0a not found: ID does not exist" Feb 16 13:19:09 crc kubenswrapper[4816]: I0216 13:19:09.405247 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" path="/var/lib/kubelet/pods/d2519664-9d4a-43d0-847e-ffdb49a03d02/volumes" Feb 16 13:19:12 crc kubenswrapper[4816]: I0216 13:19:12.773088 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:12 crc kubenswrapper[4816]: I0216 13:19:12.773392 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:12 crc kubenswrapper[4816]: I0216 13:19:12.848575 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:12 crc kubenswrapper[4816]: I0216 13:19:12.946344 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:14 crc kubenswrapper[4816]: I0216 13:19:14.196537 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-rtwl8"] Feb 16 13:19:14 crc kubenswrapper[4816]: I0216 13:19:14.922025 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rtwl8" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="registry-server" containerID="cri-o://68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08" gracePeriod=2 Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.283422 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.412748 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7zwk\" (UniqueName: \"kubernetes.io/projected/c433fb3f-2c3d-438a-bd2b-b43789096da0-kube-api-access-h7zwk\") pod \"c433fb3f-2c3d-438a-bd2b-b43789096da0\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.412885 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-utilities\") pod \"c433fb3f-2c3d-438a-bd2b-b43789096da0\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.413020 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-catalog-content\") pod \"c433fb3f-2c3d-438a-bd2b-b43789096da0\" (UID: \"c433fb3f-2c3d-438a-bd2b-b43789096da0\") " Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.416327 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-utilities" (OuterVolumeSpecName: "utilities") pod "c433fb3f-2c3d-438a-bd2b-b43789096da0" (UID: "c433fb3f-2c3d-438a-bd2b-b43789096da0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.419544 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c433fb3f-2c3d-438a-bd2b-b43789096da0-kube-api-access-h7zwk" (OuterVolumeSpecName: "kube-api-access-h7zwk") pod "c433fb3f-2c3d-438a-bd2b-b43789096da0" (UID: "c433fb3f-2c3d-438a-bd2b-b43789096da0"). InnerVolumeSpecName "kube-api-access-h7zwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.473045 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c433fb3f-2c3d-438a-bd2b-b43789096da0" (UID: "c433fb3f-2c3d-438a-bd2b-b43789096da0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.514780 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.514816 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7zwk\" (UniqueName: \"kubernetes.io/projected/c433fb3f-2c3d-438a-bd2b-b43789096da0-kube-api-access-h7zwk\") on node \"crc\" DevicePath \"\"" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.514829 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433fb3f-2c3d-438a-bd2b-b43789096da0-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.930770 4816 generic.go:334] "Generic (PLEG): container finished" podID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerID="68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08" exitCode=0 Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.931144 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rtwl8" event={"ID":"c433fb3f-2c3d-438a-bd2b-b43789096da0","Type":"ContainerDied","Data":"68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08"} Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.931189 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rtwl8" event={"ID":"c433fb3f-2c3d-438a-bd2b-b43789096da0","Type":"ContainerDied","Data":"c10e06623d1597f458e2b9fc2d4c0380833fdf7b04b7ccc8eab5c69baefdd0c5"} Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.931211 4816 scope.go:117] "RemoveContainer" containerID="68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.931351 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rtwl8" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.958858 4816 scope.go:117] "RemoveContainer" containerID="f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a" Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.962122 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rtwl8"] Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.968390 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rtwl8"] Feb 16 13:19:15 crc kubenswrapper[4816]: I0216 13:19:15.979440 4816 scope.go:117] "RemoveContainer" containerID="6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db" Feb 16 13:19:16 crc kubenswrapper[4816]: I0216 13:19:16.008337 4816 scope.go:117] "RemoveContainer" containerID="68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08" Feb 16 13:19:16 crc kubenswrapper[4816]: E0216 13:19:16.008878 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08\": container with ID starting with 68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08 not found: ID does not exist" containerID="68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08" Feb 16 13:19:16 crc kubenswrapper[4816]: I0216 13:19:16.008904 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08"} err="failed to get container status \"68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08\": rpc error: code = NotFound desc = could not find container \"68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08\": container with ID starting with 68fbb3b0c65bc28a3f7c39bba6e98ebfda9fa2be218ecbb549b4e5eeada38c08 not found: ID does not exist" Feb 16 13:19:16 crc kubenswrapper[4816]: I0216 13:19:16.008923 4816 scope.go:117] "RemoveContainer" containerID="f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a" Feb 16 13:19:16 crc kubenswrapper[4816]: E0216 13:19:16.009131 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a\": container with ID starting with f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a not found: ID does not exist" containerID="f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a" Feb 16 13:19:16 crc kubenswrapper[4816]: I0216 13:19:16.009147 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a"} err="failed to get container status \"f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a\": rpc error: code = NotFound desc = could not find container \"f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a\": container with ID starting with f7d2cd0d41169ae592137656158acc5145317e71531597994fa9cad0d8c15c2a not found: ID does not exist" Feb 16 13:19:16 crc kubenswrapper[4816]: I0216 13:19:16.009160 4816 scope.go:117] "RemoveContainer" containerID="6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db" Feb 16 13:19:16 crc kubenswrapper[4816]: E0216 13:19:16.009358 4816 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db\": container with ID starting with 6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db not found: ID does not exist" containerID="6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db" Feb 16 13:19:16 crc kubenswrapper[4816]: I0216 13:19:16.009373 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db"} err="failed to get container status \"6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db\": rpc error: code = NotFound desc = could not find container \"6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db\": container with ID starting with 6c8cedb65693087bc18e4a82f41905e4cfb59cd1021cf2bb6d3c5c2f1237e5db not found: ID does not exist" Feb 16 13:19:17 crc kubenswrapper[4816]: I0216 13:19:17.406282 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" path="/var/lib/kubelet/pods/c433fb3f-2c3d-438a-bd2b-b43789096da0/volumes" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.294119 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7"] Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.294991 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerName="extract-content" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295012 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerName="extract-content" Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.295026 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="registry-server" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295034 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="registry-server" Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.295047 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="extract-utilities" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295055 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="extract-utilities" Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.295072 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerName="extract-utilities" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295079 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerName="extract-utilities" Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.295088 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="extract-content" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295096 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="extract-content" Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.295113 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" 
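
The entries above record a complete teardown of the certified-operators catalog pod: the kubelet stops the registry-server container with a 2-second grace period, tears down the pod's projected and empty-dir volumes, sees the ContainerDied events from the PLEG, and then deletes the pod's containers. Each RemoveContainer returns gRPC NotFound because CRI-O has already removed the container; the kubelet logs the error and carries on, so deletion is effectively idempotent. A minimal Go sketch of that swallow-NotFound pattern, using an illustrative client interface rather than the real CRI API:

    package main

    import (
    	"errors"
    	"fmt"
    )

    // errNotFound stands in for the gRPC NotFound status a real CRI runtime
    // returns; the actual kubelet matches on the gRPC status code instead.
    var errNotFound = errors.New("NotFound: ID does not exist")

    // runtimeClient is an illustrative subset of a container runtime client.
    type runtimeClient interface {
    	RemoveContainer(id string) error
    }

    // fakeRuntime always reports the container as already gone, as CRI-O
    // does above once the pod has been cleaned up.
    type fakeRuntime struct{}

    func (fakeRuntime) RemoveContainer(id string) error { return errNotFound }

    // removeContainer swallows NotFound so a repeated DELETE/REMOVE pass over
    // the same pod still succeeds, mirroring the DeleteContainer entries above.
    func removeContainer(c runtimeClient, id string) error {
    	err := c.RemoveContainer(id)
    	if errors.Is(err, errNotFound) {
    		fmt.Printf("container %q already removed\n", id)
    		return nil
    	}
    	return err
    }

    func main() {
    	if err := removeContainer(fakeRuntime{}, "68fbb3b0c65b"); err != nil {
    		fmt.Println("remove failed:", err)
    	}
    }

The same shape repeats three times above, once per container ID; the RemoveStaleState entries that follow name the corresponding containers (registry-server, extract-utilities, extract-content).
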
containerName="registry-server" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295121 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerName="registry-server" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295263 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2519664-9d4a-43d0-847e-ffdb49a03d02" containerName="registry-server" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295277 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c433fb3f-2c3d-438a-bd2b-b43789096da0" containerName="registry-server" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.295794 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.298043 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-8mlzp" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.302287 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.303096 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.305102 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-tzdj7" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.308484 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.321010 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.326491 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.327541 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.329629 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-45rvc" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.347504 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.374436 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.375162 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.379668 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-2qlbb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.397053 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.406338 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.415059 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-4jk5d" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.424720 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.425553 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.434258 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-hg6k4" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.450795 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.467350 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5p46\" (UniqueName: \"kubernetes.io/projected/04a6d824-7601-4884-9b45-4a9d7b5154af-kube-api-access-r5p46\") pod \"barbican-operator-controller-manager-868647ff47-f8lx7\" (UID: \"04a6d824-7601-4884-9b45-4a9d7b5154af\") " pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.467617 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fjfd\" (UniqueName: \"kubernetes.io/projected/509907ed-a471-4584-b564-a281e4ef6d72-kube-api-access-4fjfd\") pod \"designate-operator-controller-manager-6d8bf5c495-vpfg8\" (UID: \"509907ed-a471-4584-b564-a281e4ef6d72\") " pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.467712 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5txf2\" (UniqueName: \"kubernetes.io/projected/3088a0a0-c2b4-42a5-8411-f966d8abb311-kube-api-access-5txf2\") pod \"cinder-operator-controller-manager-5d946d989d-zgztq\" (UID: \"3088a0a0-c2b4-42a5-8411-f966d8abb311\") " pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.526665 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.576180 4816 kubelet.go:2421] "SyncLoop ADD" source="api" 
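
Before admitting the openstack-operators deployment pods that begin arriving at 13:19:26, the CPU and memory managers purge per-container state left behind by the two deleted catalog pods (the RemoveStaleState and "Deleted CPUSet assignment" entries above). A sketch of that bookkeeping under an assumed state layout; the real managers keep richer state than the plain string used here:

    package main

    import "fmt"

    // staleStateCleaner mimics the cpu_manager / memory_manager bookkeeping:
    // per-pod, per-container resource assignments that must be dropped once
    // a pod is gone.
    type staleStateCleaner struct {
    	assignments map[string]map[string]string // podUID -> containerName -> assignment
    }

    // removeStaleState drops assignments for any pod that is no longer active,
    // as the kubelet does before admitting the new operator pods above.
    func (c *staleStateCleaner) removeStaleState(activePods map[string]bool) {
    	for podUID, containers := range c.assignments {
    		if activePods[podUID] {
    			continue
    		}
    		for containerName := range containers {
    			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
    				podUID, containerName)
    		}
    		delete(c.assignments, podUID)
    	}
    }

    func main() {
    	c := &staleStateCleaner{assignments: map[string]map[string]string{
    		"c433fb3f-2c3d-438a-bd2b-b43789096da0": {"registry-server": "cpuset"},
    	}}
    	c.removeStaleState(map[string]bool{}) // the deleted catalog pod is not active
    }
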
pods=["openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.577508 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.580258 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-pls5l" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.580422 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.587144 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.591930 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.594162 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88mvn\" (UniqueName: \"kubernetes.io/projected/12ae487a-61f4-46f6-835a-a9beb1b66fc5-kube-api-access-88mvn\") pod \"horizon-operator-controller-manager-5b9b8895d5-7jg9h\" (UID: \"12ae487a-61f4-46f6-835a-a9beb1b66fc5\") " pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.594286 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g922m\" (UniqueName: \"kubernetes.io/projected/23786cf0-20d5-45c1-8081-3b0e7ac9fd1a-kube-api-access-g922m\") pod \"glance-operator-controller-manager-77987464f4-2qsmj\" (UID: \"23786cf0-20d5-45c1-8081-3b0e7ac9fd1a\") " pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.594347 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fjfd\" (UniqueName: \"kubernetes.io/projected/509907ed-a471-4584-b564-a281e4ef6d72-kube-api-access-4fjfd\") pod \"designate-operator-controller-manager-6d8bf5c495-vpfg8\" (UID: \"509907ed-a471-4584-b564-a281e4ef6d72\") " pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.594385 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd2gx\" (UniqueName: \"kubernetes.io/projected/b6606fb3-5f90-403d-9730-10fa7c420e51-kube-api-access-zd2gx\") pod \"heat-operator-controller-manager-69f49c598c-pxnx2\" (UID: \"b6606fb3-5f90-403d-9730-10fa7c420e51\") " pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.594418 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5txf2\" (UniqueName: \"kubernetes.io/projected/3088a0a0-c2b4-42a5-8411-f966d8abb311-kube-api-access-5txf2\") pod \"cinder-operator-controller-manager-5d946d989d-zgztq\" (UID: \"3088a0a0-c2b4-42a5-8411-f966d8abb311\") " pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.594448 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-r5p46\" (UniqueName: \"kubernetes.io/projected/04a6d824-7601-4884-9b45-4a9d7b5154af-kube-api-access-r5p46\") pod \"barbican-operator-controller-manager-868647ff47-f8lx7\" (UID: \"04a6d824-7601-4884-9b45-4a9d7b5154af\") " pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.598716 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.605596 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.615444 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-fstjb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.642726 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.650906 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.651281 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5txf2\" (UniqueName: \"kubernetes.io/projected/3088a0a0-c2b4-42a5-8411-f966d8abb311-kube-api-access-5txf2\") pod \"cinder-operator-controller-manager-5d946d989d-zgztq\" (UID: \"3088a0a0-c2b4-42a5-8411-f966d8abb311\") " pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.651692 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.674462 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.679033 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-j55gw" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.680131 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fjfd\" (UniqueName: \"kubernetes.io/projected/509907ed-a471-4584-b564-a281e4ef6d72-kube-api-access-4fjfd\") pod \"designate-operator-controller-manager-6d8bf5c495-vpfg8\" (UID: \"509907ed-a471-4584-b564-a281e4ef6d72\") " pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.682289 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.690360 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5p46\" (UniqueName: \"kubernetes.io/projected/04a6d824-7601-4884-9b45-4a9d7b5154af-kube-api-access-r5p46\") pod \"barbican-operator-controller-manager-868647ff47-f8lx7\" (UID: \"04a6d824-7601-4884-9b45-4a9d7b5154af\") " pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.690571 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.691399 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.700317 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88mvn\" (UniqueName: \"kubernetes.io/projected/12ae487a-61f4-46f6-835a-a9beb1b66fc5-kube-api-access-88mvn\") pod \"horizon-operator-controller-manager-5b9b8895d5-7jg9h\" (UID: \"12ae487a-61f4-46f6-835a-a9beb1b66fc5\") " pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.700382 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.700407 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g922m\" (UniqueName: \"kubernetes.io/projected/23786cf0-20d5-45c1-8081-3b0e7ac9fd1a-kube-api-access-g922m\") pod \"glance-operator-controller-manager-77987464f4-2qsmj\" (UID: \"23786cf0-20d5-45c1-8081-3b0e7ac9fd1a\") " pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.700449 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlwhc\" (UniqueName: \"kubernetes.io/projected/88fcb741-6fee-4112-a1c7-5badac51848a-kube-api-access-zlwhc\") pod \"ironic-operator-controller-manager-554564d7fc-nqxbf\" (UID: \"88fcb741-6fee-4112-a1c7-5badac51848a\") " pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.700469 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd2gx\" (UniqueName: \"kubernetes.io/projected/b6606fb3-5f90-403d-9730-10fa7c420e51-kube-api-access-zd2gx\") pod \"heat-operator-controller-manager-69f49c598c-pxnx2\" (UID: \"b6606fb3-5f90-403d-9730-10fa7c420e51\") " pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.700507 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npr5r\" (UniqueName: \"kubernetes.io/projected/2b1caddd-fa9b-49de-bada-8cbeb89882e7-kube-api-access-npr5r\") 
pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.704711 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.714076 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-mlxpl" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.714342 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-bsvtn" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.732712 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.737378 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd2gx\" (UniqueName: \"kubernetes.io/projected/b6606fb3-5f90-403d-9730-10fa7c420e51-kube-api-access-zd2gx\") pod \"heat-operator-controller-manager-69f49c598c-pxnx2\" (UID: \"b6606fb3-5f90-403d-9730-10fa7c420e51\") " pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.745710 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.746741 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.748346 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88mvn\" (UniqueName: \"kubernetes.io/projected/12ae487a-61f4-46f6-835a-a9beb1b66fc5-kube-api-access-88mvn\") pod \"horizon-operator-controller-manager-5b9b8895d5-7jg9h\" (UID: \"12ae487a-61f4-46f6-835a-a9beb1b66fc5\") " pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.751591 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g922m\" (UniqueName: \"kubernetes.io/projected/23786cf0-20d5-45c1-8081-3b0e7ac9fd1a-kube-api-access-g922m\") pod \"glance-operator-controller-manager-77987464f4-2qsmj\" (UID: \"23786cf0-20d5-45c1-8081-3b0e7ac9fd1a\") " pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.753500 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.763255 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-zzzfb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.779919 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.779969 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz"] Feb 16 
13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.780996 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.797463 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-rlk28" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.797984 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.806003 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.806388 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwsn6\" (UniqueName: \"kubernetes.io/projected/de3a103e-4d92-4cc1-a7c2-5ab7c14da448-kube-api-access-jwsn6\") pod \"mariadb-operator-controller-manager-6994f66f48-ssbf6\" (UID: \"de3a103e-4d92-4cc1-a7c2-5ab7c14da448\") " pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.806446 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxl2s\" (UniqueName: \"kubernetes.io/projected/e0aa9860-8969-41d6-8cdf-c71c2aa4c167-kube-api-access-lxl2s\") pod \"keystone-operator-controller-manager-b4d948c87-pxkc2\" (UID: \"e0aa9860-8969-41d6-8cdf-c71c2aa4c167\") " pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.806482 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.806543 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlwhc\" (UniqueName: \"kubernetes.io/projected/88fcb741-6fee-4112-a1c7-5badac51848a-kube-api-access-zlwhc\") pod \"ironic-operator-controller-manager-554564d7fc-nqxbf\" (UID: \"88fcb741-6fee-4112-a1c7-5badac51848a\") " pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.806584 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nkh9\" (UniqueName: \"kubernetes.io/projected/636c0f15-1128-4cca-a9fa-b2e5a58607d4-kube-api-access-6nkh9\") pod \"manila-operator-controller-manager-54f6768c69-5fh52\" (UID: \"636c0f15-1128-4cca-a9fa-b2e5a58607d4\") " pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.806626 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npr5r\" (UniqueName: \"kubernetes.io/projected/2b1caddd-fa9b-49de-bada-8cbeb89882e7-kube-api-access-npr5r\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " 
pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.807089 4816 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:26 crc kubenswrapper[4816]: E0216 13:19:26.807134 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert podName:2b1caddd-fa9b-49de-bada-8cbeb89882e7 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:27.307118721 +0000 UTC m=+966.633832449 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert") pod "infra-operator-controller-manager-79d975b745-rx9cb" (UID: "2b1caddd-fa9b-49de-bada-8cbeb89882e7") : secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.807492 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.823768 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-4t4s8" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.839391 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npr5r\" (UniqueName: \"kubernetes.io/projected/2b1caddd-fa9b-49de-bada-8cbeb89882e7-kube-api-access-npr5r\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.842735 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.859860 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlwhc\" (UniqueName: \"kubernetes.io/projected/88fcb741-6fee-4112-a1c7-5badac51848a-kube-api-access-zlwhc\") pod \"ironic-operator-controller-manager-554564d7fc-nqxbf\" (UID: \"88fcb741-6fee-4112-a1c7-5badac51848a\") " pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.894148 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.909288 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vncp9\" (UniqueName: \"kubernetes.io/projected/c76b64f0-d1e3-4018-b694-958755f15cbe-kube-api-access-vncp9\") pod \"nova-operator-controller-manager-567668f5cf-m8ppz\" (UID: \"c76b64f0-d1e3-4018-b694-958755f15cbe\") " pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.909349 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbjnq\" (UniqueName: \"kubernetes.io/projected/d9f341ed-962a-4faa-bdad-ffec10941d95-kube-api-access-hbjnq\") pod \"octavia-operator-controller-manager-69f8888797-4lc7x\" (UID: \"d9f341ed-962a-4faa-bdad-ffec10941d95\") " 
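
Every SyncLoop ADD above kicks off the same volume-manager choreography: VerifyControllerAttachedVolume registers the pod's kube-api-access-* projected token volume in the desired state of world, MountVolume starts, and MountVolume.SetUp reports success. The one exception so far is the infra-operator cert volume, whose secret does not exist yet (picked up again below). A simplified sketch of the desired-versus-actual reconciliation behind these messages; the types are illustrative, not the kubelet's real volumemanager structures:

    package main

    import "fmt"

    // volume pairs a volume with the pod that needs it, like the
    // kube-api-access-* entries above.
    type volume struct {
    	name string // e.g. "kube-api-access-r5p46"
    	pod  string // e.g. "barbican-operator-controller-manager-868647ff47-f8lx7"
    }

    // reconcile mounts every desired volume that is not yet in the actual
    // state of world; already-mounted volumes are skipped.
    func reconcile(desired []volume, mounted map[string]bool) {
    	for _, v := range desired {
    		if mounted[v.name] {
    			continue // actual state already matches desired state
    		}
    		// The kubelet launches this asynchronously via its operation
    		// generator; the sketch does it inline.
    		fmt.Printf("MountVolume started for volume %q pod %q\n", v.name, v.pod)
    		mounted[v.name] = true
    		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.name)
    	}
    }

    func main() {
    	mounted := map[string]bool{}
    	desired := []volume{{name: "kube-api-access-r5p46",
    		pod: "barbican-operator-controller-manager-868647ff47-f8lx7"}}
    	reconcile(desired, mounted) // first pass issues the mount
    	reconcile(desired, mounted) // second pass is a no-op
    }
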
pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.909370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ngn6\" (UniqueName: \"kubernetes.io/projected/8773a01d-7f52-46e1-bbaa-92d6d385e41b-kube-api-access-8ngn6\") pod \"neutron-operator-controller-manager-64ddbf8bb-5msw2\" (UID: \"8773a01d-7f52-46e1-bbaa-92d6d385e41b\") " pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.909417 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nkh9\" (UniqueName: \"kubernetes.io/projected/636c0f15-1128-4cca-a9fa-b2e5a58607d4-kube-api-access-6nkh9\") pod \"manila-operator-controller-manager-54f6768c69-5fh52\" (UID: \"636c0f15-1128-4cca-a9fa-b2e5a58607d4\") " pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.909586 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwsn6\" (UniqueName: \"kubernetes.io/projected/de3a103e-4d92-4cc1-a7c2-5ab7c14da448-kube-api-access-jwsn6\") pod \"mariadb-operator-controller-manager-6994f66f48-ssbf6\" (UID: \"de3a103e-4d92-4cc1-a7c2-5ab7c14da448\") " pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.909666 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxl2s\" (UniqueName: \"kubernetes.io/projected/e0aa9860-8969-41d6-8cdf-c71c2aa4c167-kube-api-access-lxl2s\") pod \"keystone-operator-controller-manager-b4d948c87-pxkc2\" (UID: \"e0aa9860-8969-41d6-8cdf-c71c2aa4c167\") " pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.915496 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.924078 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.929840 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.931076 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.933265 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.934087 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nkh9\" (UniqueName: \"kubernetes.io/projected/636c0f15-1128-4cca-a9fa-b2e5a58607d4-kube-api-access-6nkh9\") pod \"manila-operator-controller-manager-54f6768c69-5fh52\" (UID: \"636c0f15-1128-4cca-a9fa-b2e5a58607d4\") " pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.934483 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwsn6\" (UniqueName: \"kubernetes.io/projected/de3a103e-4d92-4cc1-a7c2-5ab7c14da448-kube-api-access-jwsn6\") pod \"mariadb-operator-controller-manager-6994f66f48-ssbf6\" (UID: \"de3a103e-4d92-4cc1-a7c2-5ab7c14da448\") " pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.939864 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxl2s\" (UniqueName: \"kubernetes.io/projected/e0aa9860-8969-41d6-8cdf-c71c2aa4c167-kube-api-access-lxl2s\") pod \"keystone-operator-controller-manager-b4d948c87-pxkc2\" (UID: \"e0aa9860-8969-41d6-8cdf-c71c2aa4c167\") " pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.940950 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.942155 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.945073 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-s2g4v" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.950770 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.952950 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-c956x" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.953551 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.953926 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.972393 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.973464 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.976620 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-lw9zb" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.984497 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.990759 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn"] Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.992011 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" Feb 16 13:19:26 crc kubenswrapper[4816]: I0216 13:19:26.994300 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-9cmt2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.017332 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vncp9\" (UniqueName: \"kubernetes.io/projected/c76b64f0-d1e3-4018-b694-958755f15cbe-kube-api-access-vncp9\") pod \"nova-operator-controller-manager-567668f5cf-m8ppz\" (UID: \"c76b64f0-d1e3-4018-b694-958755f15cbe\") " pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.018224 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbjnq\" (UniqueName: \"kubernetes.io/projected/d9f341ed-962a-4faa-bdad-ffec10941d95-kube-api-access-hbjnq\") pod \"octavia-operator-controller-manager-69f8888797-4lc7x\" (UID: \"d9f341ed-962a-4faa-bdad-ffec10941d95\") " pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.024171 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.018242 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ngn6\" (UniqueName: \"kubernetes.io/projected/8773a01d-7f52-46e1-bbaa-92d6d385e41b-kube-api-access-8ngn6\") pod \"neutron-operator-controller-manager-64ddbf8bb-5msw2\" (UID: \"8773a01d-7f52-46e1-bbaa-92d6d385e41b\") " pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.026103 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.039919 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.058288 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.059400 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbjnq\" (UniqueName: \"kubernetes.io/projected/d9f341ed-962a-4faa-bdad-ffec10941d95-kube-api-access-hbjnq\") pod \"octavia-operator-controller-manager-69f8888797-4lc7x\" (UID: \"d9f341ed-962a-4faa-bdad-ffec10941d95\") " pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.060339 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.061550 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ngn6\" (UniqueName: \"kubernetes.io/projected/8773a01d-7f52-46e1-bbaa-92d6d385e41b-kube-api-access-8ngn6\") pod \"neutron-operator-controller-manager-64ddbf8bb-5msw2\" (UID: \"8773a01d-7f52-46e1-bbaa-92d6d385e41b\") " pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.064374 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vncp9\" (UniqueName: \"kubernetes.io/projected/c76b64f0-d1e3-4018-b694-958755f15cbe-kube-api-access-vncp9\") pod \"nova-operator-controller-manager-567668f5cf-m8ppz\" (UID: \"c76b64f0-d1e3-4018-b694-958755f15cbe\") " pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.124719 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.127208 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-7866795846-xpjn9"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.128487 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqlrp\" (UniqueName: \"kubernetes.io/projected/0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66-kube-api-access-nqlrp\") pod \"ovn-operator-controller-manager-d44cf6b75-8f9p2\" (UID: \"0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66\") " pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.128606 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24z9k\" (UniqueName: \"kubernetes.io/projected/76800815-dfe9-4b57-beaf-7d9817688213-kube-api-access-24z9k\") pod \"placement-operator-controller-manager-8497b45c89-xt5jt\" (UID: \"76800815-dfe9-4b57-beaf-7d9817688213\") " pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.128639 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-966dw\" (UniqueName: \"kubernetes.io/projected/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-kube-api-access-966dw\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.128699 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.128769 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hr6s\" (UniqueName: \"kubernetes.io/projected/dae97001-293e-4307-8e11-86a9bb275b85-kube-api-access-4hr6s\") pod \"swift-operator-controller-manager-68f46476f-hpwxn\" (UID: \"dae97001-293e-4307-8e11-86a9bb275b85\") " pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.129786 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.140949 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-27ppx" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.167273 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.168352 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.184295 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-rwh4s" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.184821 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.209186 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-7866795846-xpjn9"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.212883 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.218011 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.228293 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.229875 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hr6s\" (UniqueName: \"kubernetes.io/projected/dae97001-293e-4307-8e11-86a9bb275b85-kube-api-access-4hr6s\") pod \"swift-operator-controller-manager-68f46476f-hpwxn\" (UID: \"dae97001-293e-4307-8e11-86a9bb275b85\") " pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.229947 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqlrp\" (UniqueName: \"kubernetes.io/projected/0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66-kube-api-access-nqlrp\") pod \"ovn-operator-controller-manager-d44cf6b75-8f9p2\" (UID: \"0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66\") " pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.229987 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24z9k\" (UniqueName: \"kubernetes.io/projected/76800815-dfe9-4b57-beaf-7d9817688213-kube-api-access-24z9k\") pod \"placement-operator-controller-manager-8497b45c89-xt5jt\" (UID: \"76800815-dfe9-4b57-beaf-7d9817688213\") " pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.230007 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-966dw\" (UniqueName: \"kubernetes.io/projected/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-kube-api-access-966dw\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.230025 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " 
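
Each "Caches populated for *v1.Secret" line above corresponds to the kubelet starting a dedicated watch for one secret that an admitted pod references (the per-operator dockercfg pull secrets and the webhook/metrics certs). A start-once sketch of that registration, with an assumed namespace/name key scheme:

    package main

    import "fmt"

    // secretWatcher tracks which referenced secrets already have a running
    // reflector, so each secret is watched exactly once.
    type secretWatcher struct {
    	started map[string]bool // "namespace/name" -> watch running
    }

    // ensureWatch starts a watch for the secret unless one is already running.
    func (w *secretWatcher) ensureWatch(namespace, name string) {
    	key := namespace + "/" + name
    	if w.started[key] {
    		return // reflector already running for this secret
    	}
    	w.started[key] = true
    	fmt.Printf("Caches populated for *v1.Secret from object-%q/%q\n", namespace, name)
    }

    func main() {
    	w := &secretWatcher{started: map[string]bool{}}
    	w.ensureWatch("openstack-operators", "barbican-operator-controller-manager-dockercfg-8mlzp")
    	w.ensureWatch("openstack-operators", "barbican-operator-controller-manager-dockercfg-8mlzp") // no-op
    }
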
pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.230058 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqfnf\" (UniqueName: \"kubernetes.io/projected/7008605c-3daf-47aa-8c93-4f5b58a5c406-kube-api-access-wqfnf\") pod \"test-operator-controller-manager-7866795846-xpjn9\" (UID: \"7008605c-3daf-47aa-8c93-4f5b58a5c406\") " pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.232413 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.233205 4816 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.233269 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert podName:ee88d6f1-148e-4a18-ae88-4bdda1df4d65 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:27.733248673 +0000 UTC m=+967.059962401 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" (UID: "ee88d6f1-148e-4a18-ae88-4bdda1df4d65") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.236272 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-mmfl4" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.258684 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24z9k\" (UniqueName: \"kubernetes.io/projected/76800815-dfe9-4b57-beaf-7d9817688213-kube-api-access-24z9k\") pod \"placement-operator-controller-manager-8497b45c89-xt5jt\" (UID: \"76800815-dfe9-4b57-beaf-7d9817688213\") " pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.259111 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqlrp\" (UniqueName: \"kubernetes.io/projected/0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66-kube-api-access-nqlrp\") pod \"ovn-operator-controller-manager-d44cf6b75-8f9p2\" (UID: \"0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66\") " pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.268091 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.268187 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hr6s\" (UniqueName: \"kubernetes.io/projected/dae97001-293e-4307-8e11-86a9bb275b85-kube-api-access-4hr6s\") pod \"swift-operator-controller-manager-68f46476f-hpwxn\" (UID: \"dae97001-293e-4307-8e11-86a9bb275b85\") " pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.273839 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-966dw\" (UniqueName: \"kubernetes.io/projected/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-kube-api-access-966dw\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.298286 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.310966 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.313082 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.314550 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.317825 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.317991 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-qgqbz" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.318097 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.323757 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.330978 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.331836 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.332911 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqfnf\" (UniqueName: \"kubernetes.io/projected/7008605c-3daf-47aa-8c93-4f5b58a5c406-kube-api-access-wqfnf\") pod \"test-operator-controller-manager-7866795846-xpjn9\" (UID: \"7008605c-3daf-47aa-8c93-4f5b58a5c406\") " pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.332963 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5grrg\" (UniqueName: \"kubernetes.io/projected/e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e-kube-api-access-5grrg\") pod \"telemetry-operator-controller-manager-7f45b4ff68-v5zp6\" (UID: \"e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e\") " pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.333013 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzrps\" (UniqueName: \"kubernetes.io/projected/b23139ab-59af-4013-8d83-067804821ab2-kube-api-access-mzrps\") pod \"watcher-operator-controller-manager-5db88f68c-htckm\" (UID: \"b23139ab-59af-4013-8d83-067804821ab2\") " pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.333056 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.333149 4816 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.333191 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert podName:2b1caddd-fa9b-49de-bada-8cbeb89882e7 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:28.333177379 +0000 UTC m=+967.659891107 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert") pod "infra-operator-controller-manager-79d975b745-rx9cb" (UID: "2b1caddd-fa9b-49de-bada-8cbeb89882e7") : secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.334506 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-n525j" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.345724 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.362901 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqfnf\" (UniqueName: \"kubernetes.io/projected/7008605c-3daf-47aa-8c93-4f5b58a5c406-kube-api-access-wqfnf\") pod \"test-operator-controller-manager-7866795846-xpjn9\" (UID: \"7008605c-3daf-47aa-8c93-4f5b58a5c406\") " pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.386854 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2"] Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.406501 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.425917 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.434432 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74q9f\" (UniqueName: \"kubernetes.io/projected/bf9c19c6-7076-4c47-872d-92639392fe05-kube-api-access-74q9f\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.434690 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzrps\" (UniqueName: \"kubernetes.io/projected/b23139ab-59af-4013-8d83-067804821ab2-kube-api-access-mzrps\") pod \"watcher-operator-controller-manager-5db88f68c-htckm\" (UID: \"b23139ab-59af-4013-8d83-067804821ab2\") " pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.434735 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.434789 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: 
\"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.434839 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5grrg\" (UniqueName: \"kubernetes.io/projected/e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e-kube-api-access-5grrg\") pod \"telemetry-operator-controller-manager-7f45b4ff68-v5zp6\" (UID: \"e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e\") " pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.434858 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsx8p\" (UniqueName: \"kubernetes.io/projected/bce4fbe9-0339-4bef-b723-2ab711bb41df-kube-api-access-zsx8p\") pod \"rabbitmq-cluster-operator-manager-668c99d594-n7pnd\" (UID: \"bce4fbe9-0339-4bef-b723-2ab711bb41df\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.457820 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5grrg\" (UniqueName: \"kubernetes.io/projected/e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e-kube-api-access-5grrg\") pod \"telemetry-operator-controller-manager-7f45b4ff68-v5zp6\" (UID: \"e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e\") " pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.457979 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.469434 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzrps\" (UniqueName: \"kubernetes.io/projected/b23139ab-59af-4013-8d83-067804821ab2-kube-api-access-mzrps\") pod \"watcher-operator-controller-manager-5db88f68c-htckm\" (UID: \"b23139ab-59af-4013-8d83-067804821ab2\") " pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.485050 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.504044 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.547810 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.547935 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsx8p\" (UniqueName: \"kubernetes.io/projected/bce4fbe9-0339-4bef-b723-2ab711bb41df-kube-api-access-zsx8p\") pod \"rabbitmq-cluster-operator-manager-668c99d594-n7pnd\" (UID: \"bce4fbe9-0339-4bef-b723-2ab711bb41df\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.547980 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74q9f\" (UniqueName: \"kubernetes.io/projected/bf9c19c6-7076-4c47-872d-92639392fe05-kube-api-access-74q9f\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.548070 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.548255 4816 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.548302 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:28.048286655 +0000 UTC m=+967.375000383 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "metrics-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.548491 4816 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.548557 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:28.048539732 +0000 UTC m=+967.375253460 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.576292 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74q9f\" (UniqueName: \"kubernetes.io/projected/bf9c19c6-7076-4c47-872d-92639392fe05-kube-api-access-74q9f\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.576596 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsx8p\" (UniqueName: \"kubernetes.io/projected/bce4fbe9-0339-4bef-b723-2ab711bb41df-kube-api-access-zsx8p\") pod \"rabbitmq-cluster-operator-manager-668c99d594-n7pnd\" (UID: \"bce4fbe9-0339-4bef-b723-2ab711bb41df\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.634079 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.728928 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.751895 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.752155 4816 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: E0216 13:19:27.752215 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert podName:ee88d6f1-148e-4a18-ae88-4bdda1df4d65 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:28.752197688 +0000 UTC m=+968.078911406 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" (UID: "ee88d6f1-148e-4a18-ae88-4bdda1df4d65") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:27 crc kubenswrapper[4816]: I0216 13:19:27.767283 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7"] Feb 16 13:19:27 crc kubenswrapper[4816]: W0216 13:19:27.791143 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04a6d824_7601_4884_9b45_4a9d7b5154af.slice/crio-feb3cbe77c7498d1c7b60e65271d7a6315625bc4a5667d93518eff3965691622 WatchSource:0}: Error finding container feb3cbe77c7498d1c7b60e65271d7a6315625bc4a5667d93518eff3965691622: Status 404 returned error can't find the container with id feb3cbe77c7498d1c7b60e65271d7a6315625bc4a5667d93518eff3965691622 Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.051040 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.056195 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.056287 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.056483 4816 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.056549 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:29.056531538 +0000 UTC m=+968.383245266 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "webhook-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.056880 4816 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.056977 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:29.05695947 +0000 UTC m=+968.383673198 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "metrics-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.076147 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" event={"ID":"04a6d824-7601-4884-9b45-4a9d7b5154af","Type":"ContainerStarted","Data":"feb3cbe77c7498d1c7b60e65271d7a6315625bc4a5667d93518eff3965691622"} Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.078468 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" event={"ID":"b6606fb3-5f90-403d-9730-10fa7c420e51","Type":"ContainerStarted","Data":"9df38b4ba3152d4f6126c430a55a9fc70fbb4f90c3532d1af7e8fd8836d1855c"} Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.123198 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.130435 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.145696 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.359493 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.359759 4816 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.359843 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert podName:2b1caddd-fa9b-49de-bada-8cbeb89882e7 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:30.359821701 +0000 UTC m=+969.686535459 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert") pod "infra-operator-controller-manager-79d975b745-rx9cb" (UID: "2b1caddd-fa9b-49de-bada-8cbeb89882e7") : secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.505205 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.520948 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.542908 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h"] Feb 16 13:19:28 crc kubenswrapper[4816]: W0216 13:19:28.553479 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12ae487a_61f4_46f6_835a_a9beb1b66fc5.slice/crio-e1450f99a6e912be7f807fa1a6e085da3b05ef8acc09cd612f6fd25f62726d46 WatchSource:0}: Error finding container e1450f99a6e912be7f807fa1a6e085da3b05ef8acc09cd612f6fd25f62726d46: Status 404 returned error can't find the container with id e1450f99a6e912be7f807fa1a6e085da3b05ef8acc09cd612f6fd25f62726d46 Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.553901 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.558989 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.563628 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2"] Feb 16 13:19:28 crc kubenswrapper[4816]: W0216 13:19:28.563776 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod636c0f15_1128_4cca_a9fa_b2e5a58607d4.slice/crio-5c053cf5fac74c4ea7c3da9d1df525f02f5b8efda90fb23a4101f5de4a2fe945 WatchSource:0}: Error finding container 5c053cf5fac74c4ea7c3da9d1df525f02f5b8efda90fb23a4101f5de4a2fe945: Status 404 returned error can't find the container with id 5c053cf5fac74c4ea7c3da9d1df525f02f5b8efda90fb23a4101f5de4a2fe945 Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.694269 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn"] Feb 16 13:19:28 crc kubenswrapper[4816]: W0216 13:19:28.709942 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbce4fbe9_0339_4bef_b723_2ab711bb41df.slice/crio-8f4587cfd2b3ac73a573b6b3ede8a0d99f66c43ab0c7f3347877b7ab8b81ef7d WatchSource:0}: Error finding container 8f4587cfd2b3ac73a573b6b3ede8a0d99f66c43ab0c7f3347877b7ab8b81ef7d: Status 404 returned error can't find the container with id 8f4587cfd2b3ac73a573b6b3ede8a0d99f66c43ab0c7f3347877b7ab8b81ef7d Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.725240 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zsx8p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-n7pnd_openstack-operators(bce4fbe9-0339-4bef-b723-2ab711bb41df): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.726592 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" podUID="bce4fbe9-0339-4bef-b723-2ab711bb41df" Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.730157 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.734642 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.740045 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm"] Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.744576 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:d01ae848290e880c09127d5297418dea40fc7f090fdab9bf2c578c7e7f53aec0,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mzrps,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5db88f68c-htckm_openstack-operators(b23139ab-59af-4013-8d83-067804821ab2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.745705 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" podUID="b23139ab-59af-4013-8d83-067804821ab2" Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.780810 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.780958 4816 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.781044 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert podName:ee88d6f1-148e-4a18-ae88-4bdda1df4d65 nodeName:}" failed. 
No retries permitted until 2026-02-16 13:19:30.781023219 +0000 UTC m=+970.107736947 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" (UID: "ee88d6f1-148e-4a18-ae88-4bdda1df4d65") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.859118 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.880246 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-7866795846-xpjn9"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.884305 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz"] Feb 16 13:19:28 crc kubenswrapper[4816]: I0216 13:19:28.898641 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2"] Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.928047 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:f0fabdf79095def0f8b1c0442925548a94ca94bed4de2d3b171277129f8079e6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wqfnf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-7866795846-xpjn9_openstack-operators(7008605c-3daf-47aa-8c93-4f5b58a5c406): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.928267 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:fe85dd595906fac0fe1e7a42215bb306a963cf87d55e07cd2573726b690b2838,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vncp9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-567668f5cf-m8ppz_openstack-operators(c76b64f0-d1e3-4018-b694-958755f15cbe): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 16 13:19:28 crc 
kubenswrapper[4816]: E0216 13:19:28.929146 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" podUID="7008605c-3daf-47aa-8c93-4f5b58a5c406" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.929388 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" podUID="c76b64f0-d1e3-4018-b694-958755f15cbe" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.929753 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:543c103838f3e6ef48755665a7695dfa3ed84753c557560257d265db31f92759,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nqlrp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-d44cf6b75-8f9p2_openstack-operators(0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 16 13:19:28 crc kubenswrapper[4816]: E0216 13:19:28.931037 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" podUID="0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66" Feb 16 13:19:29 crc 
kubenswrapper[4816]: I0216 13:19:29.091199 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" event={"ID":"509907ed-a471-4584-b564-a281e4ef6d72","Type":"ContainerStarted","Data":"b536049a83ac92a07d80980c8ab15001853fc9120390119af6febfdca27b7e4a"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.091992 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.092082 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.092174 4816 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.092205 4816 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.092245 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:31.092227707 +0000 UTC m=+970.418941435 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "metrics-server-cert" not found Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.092259 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:31.092254518 +0000 UTC m=+970.418968246 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "webhook-server-cert" not found Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.093088 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" event={"ID":"c76b64f0-d1e3-4018-b694-958755f15cbe","Type":"ContainerStarted","Data":"ac94d2cfc75d07276a1ed0c0bce27e3ae13781ff14202d4e62e1abebb2ded214"} Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.096095 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:fe85dd595906fac0fe1e7a42215bb306a963cf87d55e07cd2573726b690b2838\\\"\"" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" podUID="c76b64f0-d1e3-4018-b694-958755f15cbe" Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.100837 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" event={"ID":"b23139ab-59af-4013-8d83-067804821ab2","Type":"ContainerStarted","Data":"704c0a5f530693c9be4fe79192261cfa7101cd86a7345ac6401032ce510e5f16"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.102431 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" event={"ID":"3088a0a0-c2b4-42a5-8411-f966d8abb311","Type":"ContainerStarted","Data":"f2231a3a79ab610e62a61b33f7d5ab46a13f9a5179b625ba0b82aacfb408e54b"} Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.103630 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:d01ae848290e880c09127d5297418dea40fc7f090fdab9bf2c578c7e7f53aec0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" podUID="b23139ab-59af-4013-8d83-067804821ab2" Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.108064 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" event={"ID":"d9f341ed-962a-4faa-bdad-ffec10941d95","Type":"ContainerStarted","Data":"28b85e217d142fce576c8112ba071f3aa82304b5c595fc0955731acbf0beac33"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.122741 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" event={"ID":"e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e","Type":"ContainerStarted","Data":"793b183849d54238736bf4f689df7f26acac94d0a1ac97ffad8537774bfca913"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.130226 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" event={"ID":"23786cf0-20d5-45c1-8081-3b0e7ac9fd1a","Type":"ContainerStarted","Data":"e1a5350237d766fc305389a5bddd25eaea5941980b33ee5eff14b4af610f3912"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.144174 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" 
event={"ID":"12ae487a-61f4-46f6-835a-a9beb1b66fc5","Type":"ContainerStarted","Data":"e1450f99a6e912be7f807fa1a6e085da3b05ef8acc09cd612f6fd25f62726d46"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.145849 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" event={"ID":"8773a01d-7f52-46e1-bbaa-92d6d385e41b","Type":"ContainerStarted","Data":"d844ef72a7540371a1d6b81102de0443462e9707933770655b77b2fff634d60f"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.147221 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" event={"ID":"7008605c-3daf-47aa-8c93-4f5b58a5c406","Type":"ContainerStarted","Data":"be1f8d9ae6a7f1f1ebf2d3ff4f4e5daafea87ff11f1f3e19102b7a61951c23f9"} Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.149703 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f0fabdf79095def0f8b1c0442925548a94ca94bed4de2d3b171277129f8079e6\\\"\"" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" podUID="7008605c-3daf-47aa-8c93-4f5b58a5c406" Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.160572 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" event={"ID":"0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66","Type":"ContainerStarted","Data":"2dd28cc9b2f540b912536ab203d68f28a06d854b9950b116c1ec8d12a138d5ec"} Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.167918 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:543c103838f3e6ef48755665a7695dfa3ed84753c557560257d265db31f92759\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" podUID="0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66" Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.176995 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" event={"ID":"636c0f15-1128-4cca-a9fa-b2e5a58607d4","Type":"ContainerStarted","Data":"5c053cf5fac74c4ea7c3da9d1df525f02f5b8efda90fb23a4101f5de4a2fe945"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.181031 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" event={"ID":"de3a103e-4d92-4cc1-a7c2-5ab7c14da448","Type":"ContainerStarted","Data":"2dac7dcdd87300d53ff77394bc303198b269e7793b296b84e72b81803b07ac53"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.182813 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" event={"ID":"e0aa9860-8969-41d6-8cdf-c71c2aa4c167","Type":"ContainerStarted","Data":"a77079010bf3aabb053d7a41ad07c2117ea68e85b2a041dabdbd79ca1ec114c3"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.184415 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" event={"ID":"dae97001-293e-4307-8e11-86a9bb275b85","Type":"ContainerStarted","Data":"a20fd1882818991d606cbb94367dca84d38dc202c3694d53dc9e1312fc318901"} Feb 16 13:19:29 crc 
kubenswrapper[4816]: I0216 13:19:29.187779 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" event={"ID":"bce4fbe9-0339-4bef-b723-2ab711bb41df","Type":"ContainerStarted","Data":"8f4587cfd2b3ac73a573b6b3ede8a0d99f66c43ab0c7f3347877b7ab8b81ef7d"} Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.199114 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" event={"ID":"88fcb741-6fee-4112-a1c7-5badac51848a","Type":"ContainerStarted","Data":"c0815789b79ebb868225c8254df0b775350e5d6cbbfa89b892316d40a8b01e65"} Feb 16 13:19:29 crc kubenswrapper[4816]: E0216 13:19:29.204445 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" podUID="bce4fbe9-0339-4bef-b723-2ab711bb41df" Feb 16 13:19:29 crc kubenswrapper[4816]: I0216 13:19:29.217965 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" event={"ID":"76800815-dfe9-4b57-beaf-7d9817688213","Type":"ContainerStarted","Data":"b662df89c49922c06555e30cd26a127df4bb93aa2bdf3189776b821509e4b157"} Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.308572 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:fe85dd595906fac0fe1e7a42215bb306a963cf87d55e07cd2573726b690b2838\\\"\"" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" podUID="c76b64f0-d1e3-4018-b694-958755f15cbe" Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.308694 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:d01ae848290e880c09127d5297418dea40fc7f090fdab9bf2c578c7e7f53aec0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" podUID="b23139ab-59af-4013-8d83-067804821ab2" Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.309174 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" podUID="bce4fbe9-0339-4bef-b723-2ab711bb41df" Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.309187 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f0fabdf79095def0f8b1c0442925548a94ca94bed4de2d3b171277129f8079e6\\\"\"" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" podUID="7008605c-3daf-47aa-8c93-4f5b58a5c406" Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.309230 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:543c103838f3e6ef48755665a7695dfa3ed84753c557560257d265db31f92759\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" podUID="0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66" Feb 16 13:19:30 crc kubenswrapper[4816]: I0216 13:19:30.416790 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.417525 4816 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.417576 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert podName:2b1caddd-fa9b-49de-bada-8cbeb89882e7 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:34.417560528 +0000 UTC m=+973.744274256 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert") pod "infra-operator-controller-manager-79d975b745-rx9cb" (UID: "2b1caddd-fa9b-49de-bada-8cbeb89882e7") : secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:30 crc kubenswrapper[4816]: I0216 13:19:30.825525 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.826043 4816 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:30 crc kubenswrapper[4816]: E0216 13:19:30.826107 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert podName:ee88d6f1-148e-4a18-ae88-4bdda1df4d65 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:34.826088661 +0000 UTC m=+974.152802389 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" (UID: "ee88d6f1-148e-4a18-ae88-4bdda1df4d65") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:31 crc kubenswrapper[4816]: I0216 13:19:31.349852 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:31 crc kubenswrapper[4816]: I0216 13:19:31.350002 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:31 crc kubenswrapper[4816]: E0216 13:19:31.350172 4816 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 16 13:19:31 crc kubenswrapper[4816]: E0216 13:19:31.350219 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:35.350204355 +0000 UTC m=+974.676918083 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "metrics-server-cert" not found Feb 16 13:19:31 crc kubenswrapper[4816]: E0216 13:19:31.350599 4816 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 16 13:19:31 crc kubenswrapper[4816]: E0216 13:19:31.350628 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:35.350620147 +0000 UTC m=+974.677333875 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "webhook-server-cert" not found Feb 16 13:19:34 crc kubenswrapper[4816]: I0216 13:19:34.420113 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:34 crc kubenswrapper[4816]: E0216 13:19:34.420681 4816 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:34 crc kubenswrapper[4816]: E0216 13:19:34.420735 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert podName:2b1caddd-fa9b-49de-bada-8cbeb89882e7 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:42.420720127 +0000 UTC m=+981.747433855 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert") pod "infra-operator-controller-manager-79d975b745-rx9cb" (UID: "2b1caddd-fa9b-49de-bada-8cbeb89882e7") : secret "infra-operator-webhook-server-cert" not found Feb 16 13:19:34 crc kubenswrapper[4816]: I0216 13:19:34.925437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:34 crc kubenswrapper[4816]: E0216 13:19:34.925625 4816 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:34 crc kubenswrapper[4816]: E0216 13:19:34.925710 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert podName:ee88d6f1-148e-4a18-ae88-4bdda1df4d65 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:42.925692251 +0000 UTC m=+982.252405979 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" (UID: "ee88d6f1-148e-4a18-ae88-4bdda1df4d65") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 16 13:19:35 crc kubenswrapper[4816]: I0216 13:19:35.431488 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:35 crc kubenswrapper[4816]: I0216 13:19:35.431620 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:35 crc kubenswrapper[4816]: E0216 13:19:35.431771 4816 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 16 13:19:35 crc kubenswrapper[4816]: E0216 13:19:35.431845 4816 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 16 13:19:35 crc kubenswrapper[4816]: E0216 13:19:35.431875 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:43.431846948 +0000 UTC m=+982.758560716 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "metrics-server-cert" not found Feb 16 13:19:35 crc kubenswrapper[4816]: E0216 13:19:35.431926 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs podName:bf9c19c6-7076-4c47-872d-92639392fe05 nodeName:}" failed. No retries permitted until 2026-02-16 13:19:43.43190549 +0000 UTC m=+982.758619228 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs") pod "openstack-operator-controller-manager-5cd688d8fc-w74kn" (UID: "bf9c19c6-7076-4c47-872d-92639392fe05") : secret "webhook-server-cert" not found Feb 16 13:19:42 crc kubenswrapper[4816]: I0216 13:19:42.476504 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:42 crc kubenswrapper[4816]: I0216 13:19:42.503077 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b1caddd-fa9b-49de-bada-8cbeb89882e7-cert\") pod \"infra-operator-controller-manager-79d975b745-rx9cb\" (UID: \"2b1caddd-fa9b-49de-bada-8cbeb89882e7\") " pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:42 crc kubenswrapper[4816]: I0216 13:19:42.535849 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:19:42 crc kubenswrapper[4816]: I0216 13:19:42.982937 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:42 crc kubenswrapper[4816]: I0216 13:19:42.988558 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ee88d6f1-148e-4a18-ae88-4bdda1df4d65-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd\" (UID: \"ee88d6f1-148e-4a18-ae88-4bdda1df4d65\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:43 crc kubenswrapper[4816]: E0216 13:19:43.271317 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:c1e33e962043cd6e3d09ebd225cb72781451dba7af2d57522e5c6eedbdc91642" Feb 16 13:19:43 crc kubenswrapper[4816]: E0216 13:19:43.271567 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:c1e33e962043cd6e3d09ebd225cb72781451dba7af2d57522e5c6eedbdc91642,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4fjfd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-6d8bf5c495-vpfg8_openstack-operators(509907ed-a471-4584-b564-a281e4ef6d72): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:43 crc kubenswrapper[4816]: E0216 13:19:43.272726 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" podUID="509907ed-a471-4584-b564-a281e4ef6d72" Feb 16 13:19:43 crc kubenswrapper[4816]: I0216 13:19:43.276141 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:19:43 crc kubenswrapper[4816]: I0216 13:19:43.487433 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:43 crc kubenswrapper[4816]: I0216 13:19:43.487511 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:43 crc kubenswrapper[4816]: I0216 13:19:43.490800 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-metrics-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:43 crc kubenswrapper[4816]: I0216 13:19:43.503371 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf9c19c6-7076-4c47-872d-92639392fe05-webhook-certs\") pod \"openstack-operator-controller-manager-5cd688d8fc-w74kn\" (UID: \"bf9c19c6-7076-4c47-872d-92639392fe05\") " pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:43 crc kubenswrapper[4816]: E0216 13:19:43.510166 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:c1e33e962043cd6e3d09ebd225cb72781451dba7af2d57522e5c6eedbdc91642\\\"\"" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" podUID="509907ed-a471-4584-b564-a281e4ef6d72" Feb 16 13:19:43 crc kubenswrapper[4816]: I0216 13:19:43.593443 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:19:43 crc kubenswrapper[4816]: E0216 13:19:43.984556 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:229fc8c8d94dd4102d2151cd4ec1eaaa09d897c2b396d06e903f61ea29c1fa34" Feb 16 13:19:43 crc kubenswrapper[4816]: E0216 13:19:43.985084 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:229fc8c8d94dd4102d2151cd4ec1eaaa09d897c2b396d06e903f61ea29c1fa34,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hbjnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-69f8888797-4lc7x_openstack-operators(d9f341ed-962a-4faa-bdad-ffec10941d95): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:43 crc kubenswrapper[4816]: E0216 13:19:43.986255 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" podUID="d9f341ed-962a-4faa-bdad-ffec10941d95" Feb 16 13:19:44 crc kubenswrapper[4816]: E0216 13:19:44.516401 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:229fc8c8d94dd4102d2151cd4ec1eaaa09d897c2b396d06e903f61ea29c1fa34\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" podUID="d9f341ed-962a-4faa-bdad-ffec10941d95" Feb 16 13:19:44 crc kubenswrapper[4816]: E0216 13:19:44.724804 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:2b8ab3063af4aaeed0198197aae6f391c6647ac686c94c85668537f1d5933979" Feb 16 13:19:44 crc kubenswrapper[4816]: E0216 13:19:44.724989 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:2b8ab3063af4aaeed0198197aae6f391c6647ac686c94c85668537f1d5933979,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5txf2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-5d946d989d-zgztq_openstack-operators(3088a0a0-c2b4-42a5-8411-f966d8abb311): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:44 crc kubenswrapper[4816]: E0216 13:19:44.726180 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" podUID="3088a0a0-c2b4-42a5-8411-f966d8abb311" Feb 16 13:19:45 crc kubenswrapper[4816]: E0216 13:19:45.520704 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:2b8ab3063af4aaeed0198197aae6f391c6647ac686c94c85668537f1d5933979\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" podUID="3088a0a0-c2b4-42a5-8411-f966d8abb311" Feb 16 13:19:45 crc kubenswrapper[4816]: E0216 13:19:45.580860 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:a18f12497b7159b100fcfd72c7ba2273d0669a5c00600a9ff1333bca028f256a" Feb 16 13:19:45 crc kubenswrapper[4816]: E0216 13:19:45.581081 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:a18f12497b7159b100fcfd72c7ba2273d0669a5c00600a9ff1333bca028f256a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jwsn6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-6994f66f48-ssbf6_openstack-operators(de3a103e-4d92-4cc1-a7c2-5ab7c14da448): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:45 crc kubenswrapper[4816]: 
E0216 13:19:45.582444 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" podUID="de3a103e-4d92-4cc1-a7c2-5ab7c14da448" Feb 16 13:19:46 crc kubenswrapper[4816]: E0216 13:19:46.529495 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:a18f12497b7159b100fcfd72c7ba2273d0669a5c00600a9ff1333bca028f256a\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" podUID="de3a103e-4d92-4cc1-a7c2-5ab7c14da448" Feb 16 13:19:47 crc kubenswrapper[4816]: E0216 13:19:47.942552 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:3d676f1281e24ef07de617570d2f7fbf625032e41866d1551a856c052248bb04" Feb 16 13:19:47 crc kubenswrapper[4816]: E0216 13:19:47.943090 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3d676f1281e24ef07de617570d2f7fbf625032e41866d1551a856c052248bb04,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4hr6s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
swift-operator-controller-manager-68f46476f-hpwxn_openstack-operators(dae97001-293e-4307-8e11-86a9bb275b85): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:47 crc kubenswrapper[4816]: E0216 13:19:47.944692 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" podUID="dae97001-293e-4307-8e11-86a9bb275b85" Feb 16 13:19:48 crc kubenswrapper[4816]: E0216 13:19:48.557917 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3d676f1281e24ef07de617570d2f7fbf625032e41866d1551a856c052248bb04\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" podUID="dae97001-293e-4307-8e11-86a9bb275b85" Feb 16 13:19:49 crc kubenswrapper[4816]: E0216 13:19:49.175022 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:1ab3ec59cd8e30dd8423e91ad832403bdefbae3b8ac47e15578d5a677d7ba0df" Feb 16 13:19:49 crc kubenswrapper[4816]: E0216 13:19:49.175209 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:1ab3ec59cd8e30dd8423e91ad832403bdefbae3b8ac47e15578d5a677d7ba0df,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g922m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987464f4-2qsmj_openstack-operators(23786cf0-20d5-45c1-8081-3b0e7ac9fd1a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:49 crc kubenswrapper[4816]: E0216 13:19:49.177333 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" podUID="23786cf0-20d5-45c1-8081-3b0e7ac9fd1a" Feb 16 13:19:49 crc kubenswrapper[4816]: E0216 13:19:49.564940 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:1ab3ec59cd8e30dd8423e91ad832403bdefbae3b8ac47e15578d5a677d7ba0df\\\"\"" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" podUID="23786cf0-20d5-45c1-8081-3b0e7ac9fd1a" Feb 16 13:19:49 crc kubenswrapper[4816]: E0216 13:19:49.771828 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:c6ad383f55f955902b074d1ee947a2233a5fcbf40698479ae693ce056c80dcc1" Feb 16 13:19:49 crc kubenswrapper[4816]: E0216 13:19:49.772394 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:c6ad383f55f955902b074d1ee947a2233a5fcbf40698479ae693ce056c80dcc1,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lxl2s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-b4d948c87-pxkc2_openstack-operators(e0aa9860-8969-41d6-8cdf-c71c2aa4c167): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:49 crc kubenswrapper[4816]: E0216 13:19:49.773545 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" podUID="e0aa9860-8969-41d6-8cdf-c71c2aa4c167" Feb 16 13:19:50 crc kubenswrapper[4816]: E0216 13:19:50.365104 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:8fb0a33b8d93cf9f84f079af5f2ceb680afada4e44542514959146779f57f64c" Feb 16 13:19:50 crc kubenswrapper[4816]: E0216 13:19:50.365284 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:8fb0a33b8d93cf9f84f079af5f2ceb680afada4e44542514959146779f57f64c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6nkh9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-54f6768c69-5fh52_openstack-operators(636c0f15-1128-4cca-a9fa-b2e5a58607d4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:19:50 crc kubenswrapper[4816]: E0216 13:19:50.366492 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" podUID="636c0f15-1128-4cca-a9fa-b2e5a58607d4" Feb 16 13:19:50 crc kubenswrapper[4816]: E0216 13:19:50.590061 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:8fb0a33b8d93cf9f84f079af5f2ceb680afada4e44542514959146779f57f64c\\\"\"" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" podUID="636c0f15-1128-4cca-a9fa-b2e5a58607d4" Feb 16 13:19:50 crc kubenswrapper[4816]: E0216 13:19:50.590865 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:c6ad383f55f955902b074d1ee947a2233a5fcbf40698479ae693ce056c80dcc1\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" podUID="e0aa9860-8969-41d6-8cdf-c71c2aa4c167" Feb 16 13:19:50 crc kubenswrapper[4816]: I0216 13:19:50.599692 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd"] Feb 16 13:19:54 crc kubenswrapper[4816]: I0216 13:19:54.679709 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" event={"ID":"ee88d6f1-148e-4a18-ae88-4bdda1df4d65","Type":"ContainerStarted","Data":"5689f9eeb70809c42bfeac9db4a2f8df69a3e914c08b3c34aa1d9fc3ed1d7b35"} Feb 16 13:19:55 crc kubenswrapper[4816]: I0216 13:19:55.463752 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb"] Feb 16 13:19:55 crc kubenswrapper[4816]: I0216 13:19:55.612091 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn"] Feb 16 13:19:56 crc kubenswrapper[4816]: W0216 13:19:56.311592 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b1caddd_fa9b_49de_bada_8cbeb89882e7.slice/crio-48951f83ddeeb92f5fbc942ed2933250d59a573f42b8230c592ceb07d464375e WatchSource:0}: Error finding container 48951f83ddeeb92f5fbc942ed2933250d59a573f42b8230c592ceb07d464375e: Status 404 returned error can't find the container with id 48951f83ddeeb92f5fbc942ed2933250d59a573f42b8230c592ceb07d464375e Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.703982 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" event={"ID":"12ae487a-61f4-46f6-835a-a9beb1b66fc5","Type":"ContainerStarted","Data":"32d0e97b315bcf77c49e4b0c9531c70d96bfa1f43483836118ae431560f7975b"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.704610 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.709205 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" event={"ID":"8773a01d-7f52-46e1-bbaa-92d6d385e41b","Type":"ContainerStarted","Data":"698f2f547dd52ae5844ec28be6dee98cb7d95974692523e642a1bad8438b1694"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.709898 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.715760 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" event={"ID":"b6606fb3-5f90-403d-9730-10fa7c420e51","Type":"ContainerStarted","Data":"179919f59d0fd4a987c495e9a5e3e2a1d854c84003b1213a63c577264314cc67"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.716482 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.724313 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" event={"ID":"e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e","Type":"ContainerStarted","Data":"b13dfdb3f6972dec7bec755e0842b1acb2a869fea9c37f2ac74ecccf57782420"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.724833 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.727039 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" podStartSLOduration=9.521900655 podStartE2EDuration="30.727024198s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.555346795 +0000 UTC m=+967.882060523" lastFinishedPulling="2026-02-16 13:19:49.760470338 +0000 UTC m=+989.087184066" observedRunningTime="2026-02-16 13:19:56.722971187 +0000 UTC m=+996.049684915" watchObservedRunningTime="2026-02-16 13:19:56.727024198 +0000 UTC m=+996.053737926" Feb 16 13:19:56 crc 
kubenswrapper[4816]: I0216 13:19:56.729141 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" event={"ID":"88fcb741-6fee-4112-a1c7-5badac51848a","Type":"ContainerStarted","Data":"9d4d6100c7c136432125799d3ee3d23b50678eaae1f5b56581e9fdf7818a056a"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.729213 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.732547 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" event={"ID":"c76b64f0-d1e3-4018-b694-958755f15cbe","Type":"ContainerStarted","Data":"8727c49fcd5a61eda69c3888fa966759de4b2d4bee137dce21aa26b8d3f85537"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.733111 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.735117 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" event={"ID":"bf9c19c6-7076-4c47-872d-92639392fe05","Type":"ContainerStarted","Data":"d544874933cb2119110556f1bc327c1db6c67417089322b84237c343f4fff472"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.744984 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" podStartSLOduration=7.745320781 podStartE2EDuration="30.744969445s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.548082268 +0000 UTC m=+967.874795996" lastFinishedPulling="2026-02-16 13:19:51.547730932 +0000 UTC m=+990.874444660" observedRunningTime="2026-02-16 13:19:56.743241078 +0000 UTC m=+996.069954806" watchObservedRunningTime="2026-02-16 13:19:56.744969445 +0000 UTC m=+996.071683173" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.746959 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" event={"ID":"76800815-dfe9-4b57-beaf-7d9817688213","Type":"ContainerStarted","Data":"e7d961b509a87095b6e056773e1745fa68b7239cd1c061b9c10799d00ad08bf8"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.747573 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.761255 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" event={"ID":"0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66","Type":"ContainerStarted","Data":"de44200bb64b07317457dd76cf8016e31a66c9b172bb6bb5075595e8a1f74dab"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.761891 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.774729 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" 
event={"ID":"04a6d824-7601-4884-9b45-4a9d7b5154af","Type":"ContainerStarted","Data":"ee042864de3fafde85eb08454c389d7a1103d7d5fdc5df848cf98788d636bd63"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.774801 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.777337 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" event={"ID":"2b1caddd-fa9b-49de-bada-8cbeb89882e7","Type":"ContainerStarted","Data":"48951f83ddeeb92f5fbc942ed2933250d59a573f42b8230c592ceb07d464375e"} Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.778475 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" podStartSLOduration=8.451833802 podStartE2EDuration="30.778458585s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:27.433889016 +0000 UTC m=+966.760602744" lastFinishedPulling="2026-02-16 13:19:49.760513789 +0000 UTC m=+989.087227527" observedRunningTime="2026-02-16 13:19:56.774440996 +0000 UTC m=+996.101154724" watchObservedRunningTime="2026-02-16 13:19:56.778458585 +0000 UTC m=+996.105172313" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.792018 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" podStartSLOduration=9.213873345 podStartE2EDuration="30.791999933s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.181102265 +0000 UTC m=+967.507815993" lastFinishedPulling="2026-02-16 13:19:49.759228863 +0000 UTC m=+989.085942581" observedRunningTime="2026-02-16 13:19:56.789022333 +0000 UTC m=+996.115736061" watchObservedRunningTime="2026-02-16 13:19:56.791999933 +0000 UTC m=+996.118713661" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.820104 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" podStartSLOduration=4.898144688 podStartE2EDuration="30.820085507s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.92788974 +0000 UTC m=+968.254603478" lastFinishedPulling="2026-02-16 13:19:54.849830569 +0000 UTC m=+994.176544297" observedRunningTime="2026-02-16 13:19:56.818250417 +0000 UTC m=+996.144964155" watchObservedRunningTime="2026-02-16 13:19:56.820085507 +0000 UTC m=+996.146799235" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.914068 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" podStartSLOduration=9.876751519 podStartE2EDuration="30.91405037s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.723267089 +0000 UTC m=+968.049980817" lastFinishedPulling="2026-02-16 13:19:49.76056594 +0000 UTC m=+989.087279668" observedRunningTime="2026-02-16 13:19:56.910552525 +0000 UTC m=+996.237266253" watchObservedRunningTime="2026-02-16 13:19:56.91405037 +0000 UTC m=+996.240764098" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.914598 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" 
podStartSLOduration=8.238487416 podStartE2EDuration="30.914593556s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.872533707 +0000 UTC m=+968.199247435" lastFinishedPulling="2026-02-16 13:19:51.548639857 +0000 UTC m=+990.875353575" observedRunningTime="2026-02-16 13:19:56.873154529 +0000 UTC m=+996.199868257" watchObservedRunningTime="2026-02-16 13:19:56.914593556 +0000 UTC m=+996.241307274" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.942231 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" podStartSLOduration=4.883908481 podStartE2EDuration="30.942217295s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.928164858 +0000 UTC m=+968.254878586" lastFinishedPulling="2026-02-16 13:19:54.986473672 +0000 UTC m=+994.313187400" observedRunningTime="2026-02-16 13:19:56.936939143 +0000 UTC m=+996.263652881" watchObservedRunningTime="2026-02-16 13:19:56.942217295 +0000 UTC m=+996.268931023" Feb 16 13:19:56 crc kubenswrapper[4816]: I0216 13:19:56.972775 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" podStartSLOduration=9.005453959 podStartE2EDuration="30.972754226s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:27.793218042 +0000 UTC m=+967.119931770" lastFinishedPulling="2026-02-16 13:19:49.760518309 +0000 UTC m=+989.087232037" observedRunningTime="2026-02-16 13:19:56.972141499 +0000 UTC m=+996.298855227" watchObservedRunningTime="2026-02-16 13:19:56.972754226 +0000 UTC m=+996.299467954" Feb 16 13:19:57 crc kubenswrapper[4816]: I0216 13:19:57.798865 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" event={"ID":"b23139ab-59af-4013-8d83-067804821ab2","Type":"ContainerStarted","Data":"b0e24381f28e33e137f119e41e5c88fe007b7be3c34098f764f0d143668564d7"} Feb 16 13:19:57 crc kubenswrapper[4816]: I0216 13:19:57.800085 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" Feb 16 13:19:57 crc kubenswrapper[4816]: I0216 13:19:57.801759 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" event={"ID":"509907ed-a471-4584-b564-a281e4ef6d72","Type":"ContainerStarted","Data":"7000d9df7292deb140054194292ad5d10c8612801c3bef079615dddccec005c4"} Feb 16 13:19:57 crc kubenswrapper[4816]: I0216 13:19:57.839540 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" podStartSLOduration=5.564545411 podStartE2EDuration="31.839523673s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.744434555 +0000 UTC m=+968.071148283" lastFinishedPulling="2026-02-16 13:19:55.019412817 +0000 UTC m=+994.346126545" observedRunningTime="2026-02-16 13:19:57.838916567 +0000 UTC m=+997.165630295" watchObservedRunningTime="2026-02-16 13:19:57.839523673 +0000 UTC m=+997.166237401" Feb 16 13:19:57 crc kubenswrapper[4816]: I0216 13:19:57.862498 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" 
podStartSLOduration=4.980691813 podStartE2EDuration="31.862482327s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.15444759 +0000 UTC m=+967.481161328" lastFinishedPulling="2026-02-16 13:19:55.036238114 +0000 UTC m=+994.362951842" observedRunningTime="2026-02-16 13:19:57.861423349 +0000 UTC m=+997.188137077" watchObservedRunningTime="2026-02-16 13:19:57.862482327 +0000 UTC m=+997.189196055" Feb 16 13:20:02 crc kubenswrapper[4816]: I0216 13:20:02.726974 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" event={"ID":"7008605c-3daf-47aa-8c93-4f5b58a5c406","Type":"ContainerStarted","Data":"b27bb269b01e3103e6277b9047035b81884c4ae931835343e6de21930e878ce9"} Feb 16 13:20:06 crc kubenswrapper[4816]: I0216 13:20:06.803056 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-69f49c598c-pxnx2" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:06.920484 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-868647ff47-f8lx7" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.093256 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.098987 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5b9b8895d5-7jg9h" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.104043 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.105077 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.107530 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6d8bf5c495-vpfg8" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.130023 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-554564d7fc-nqxbf" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.302150 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64ddbf8bb-5msw2" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.313219 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-567668f5cf-m8ppz" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.409532 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-d44cf6b75-8f9p2" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.430958 4816 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-8497b45c89-xt5jt" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.640918 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5db88f68c-htckm" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.682931 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.761681 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" event={"ID":"bf9c19c6-7076-4c47-872d-92639392fe05","Type":"ContainerStarted","Data":"7f25b8b03ec2bcaafdcc98ef5857832c03f2a1fc2228458fdd64fabcc9dd28a2"} Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.762035 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.781239 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" podStartSLOduration=15.689681766 podStartE2EDuration="41.781220783s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.927905261 +0000 UTC m=+968.254618989" lastFinishedPulling="2026-02-16 13:19:55.019444278 +0000 UTC m=+994.346158006" observedRunningTime="2026-02-16 13:20:07.778091247 +0000 UTC m=+1007.104804975" watchObservedRunningTime="2026-02-16 13:20:07.781220783 +0000 UTC m=+1007.107934511" Feb 16 13:20:07 crc kubenswrapper[4816]: I0216 13:20:07.800956 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-7866795846-xpjn9" Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.100963 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-7f45b4ff68-v5zp6" Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.774808 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" event={"ID":"d9f341ed-962a-4faa-bdad-ffec10941d95","Type":"ContainerStarted","Data":"a13b5f6fb21c9c351bf72a8ff91464443d750168100482094d1bb97c943fed0d"} Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.776342 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.783499 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" event={"ID":"bce4fbe9-0339-4bef-b723-2ab711bb41df","Type":"ContainerStarted","Data":"42bd1c56214d4d89c023d4b7290cfbf21bf4e3298c3083317907f1efe7efe287"} Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.786474 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" event={"ID":"ee88d6f1-148e-4a18-ae88-4bdda1df4d65","Type":"ContainerStarted","Data":"a74dbd42ea4d0761604c8fd7cc74f052b4a36cd921f591ae075b1fc3b9591a43"} Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.787256 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.787637 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.833133 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x" podStartSLOduration=3.687047292 podStartE2EDuration="42.833104241s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.540252435 +0000 UTC m=+967.866966163" lastFinishedPulling="2026-02-16 13:20:07.686309384 +0000 UTC m=+1007.013023112" observedRunningTime="2026-02-16 13:20:08.823921542 +0000 UTC m=+1008.150635280" watchObservedRunningTime="2026-02-16 13:20:08.833104241 +0000 UTC m=+1008.159817989" Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.877501 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" podStartSLOduration=29.154937561 podStartE2EDuration="42.877477977s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:53.962108002 +0000 UTC m=+993.288821730" lastFinishedPulling="2026-02-16 13:20:07.684648408 +0000 UTC m=+1007.011362146" observedRunningTime="2026-02-16 13:20:08.86063704 +0000 UTC m=+1008.187350788" watchObservedRunningTime="2026-02-16 13:20:08.877477977 +0000 UTC m=+1008.204191715" Feb 16 13:20:08 crc kubenswrapper[4816]: I0216 13:20:08.895770 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-n7pnd" podStartSLOduration=15.635360397 podStartE2EDuration="41.895741573s" podCreationTimestamp="2026-02-16 13:19:27 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.72512363 +0000 UTC m=+968.051837358" lastFinishedPulling="2026-02-16 13:19:54.985504806 +0000 UTC m=+994.312218534" observedRunningTime="2026-02-16 13:20:08.891098098 +0000 UTC m=+1008.217811826" watchObservedRunningTime="2026-02-16 13:20:08.895741573 +0000 UTC m=+1008.222455301" Feb 16 13:20:09 crc kubenswrapper[4816]: I0216 13:20:08.957424 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn" podStartSLOduration=41.957394699 podStartE2EDuration="41.957394699s" podCreationTimestamp="2026-02-16 13:19:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:20:08.939689688 +0000 UTC m=+1008.266403416" watchObservedRunningTime="2026-02-16 13:20:08.957394699 +0000 UTC m=+1008.284108427" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.832699 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" event={"ID":"636c0f15-1128-4cca-a9fa-b2e5a58607d4","Type":"ContainerStarted","Data":"50f5bf39494cd0abbbdd3e6346a4fa9d7fa35a8c0267f7372666ff7052a3271a"} Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.834386 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.836725 4816 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" event={"ID":"de3a103e-4d92-4cc1-a7c2-5ab7c14da448","Type":"ContainerStarted","Data":"fb9594346ee209df6625e272879f64f4e254505be25db1f1499ee4cce6759300"} Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.836935 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.839429 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" event={"ID":"e0aa9860-8969-41d6-8cdf-c71c2aa4c167","Type":"ContainerStarted","Data":"f657227164efc7f4206c76f3788e76191bc053bbdc05589632405f31310a7752"} Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.839621 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.841204 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" event={"ID":"3088a0a0-c2b4-42a5-8411-f966d8abb311","Type":"ContainerStarted","Data":"7a34f53a0588531743ac27831650dc6915e8fa5b79a93fdb650a6e13f36ab3f3"} Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.841420 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.842616 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" event={"ID":"23786cf0-20d5-45c1-8081-3b0e7ac9fd1a","Type":"ContainerStarted","Data":"5e5df73ce30f11608a8902b8d2d9be7800e7590ac967cacf59698d52b0f55fa8"} Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.843030 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.844472 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" event={"ID":"dae97001-293e-4307-8e11-86a9bb275b85","Type":"ContainerStarted","Data":"1ed77dc84daa4f85a9a274679e5616c2448d157ce2455da3ce53851467335dbe"} Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.844919 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.846239 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" event={"ID":"2b1caddd-fa9b-49de-bada-8cbeb89882e7","Type":"ContainerStarted","Data":"586e3f049f77bec2ad7d18e812d5a59f2e539973c0a1ecc62cf371156824b95b"} Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.846683 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.863549 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52" podStartSLOduration=3.7619056779999998 podStartE2EDuration="45.863531814s" 
podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.581540388 +0000 UTC m=+967.908254116" lastFinishedPulling="2026-02-16 13:20:10.683166514 +0000 UTC m=+1010.009880252" observedRunningTime="2026-02-16 13:20:11.858951899 +0000 UTC m=+1011.185665627" watchObservedRunningTime="2026-02-16 13:20:11.863531814 +0000 UTC m=+1011.190245542" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.896701 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2" podStartSLOduration=3.749383778 podStartE2EDuration="45.896687285s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.546580227 +0000 UTC m=+967.873293955" lastFinishedPulling="2026-02-16 13:20:10.693883724 +0000 UTC m=+1010.020597462" observedRunningTime="2026-02-16 13:20:11.883121226 +0000 UTC m=+1011.209834954" watchObservedRunningTime="2026-02-16 13:20:11.896687285 +0000 UTC m=+1011.223401003" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.898954 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq" podStartSLOduration=3.426181404 podStartE2EDuration="45.898947426s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.181498765 +0000 UTC m=+967.508212493" lastFinishedPulling="2026-02-16 13:20:10.654264787 +0000 UTC m=+1009.980978515" observedRunningTime="2026-02-16 13:20:11.893951291 +0000 UTC m=+1011.220665019" watchObservedRunningTime="2026-02-16 13:20:11.898947426 +0000 UTC m=+1011.225661154" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.947781 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb" podStartSLOduration=31.600546869 podStartE2EDuration="45.947758383s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:56.335674591 +0000 UTC m=+995.662388329" lastFinishedPulling="2026-02-16 13:20:10.682886115 +0000 UTC m=+1010.009599843" observedRunningTime="2026-02-16 13:20:11.915430004 +0000 UTC m=+1011.242143732" watchObservedRunningTime="2026-02-16 13:20:11.947758383 +0000 UTC m=+1011.274472111" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.980765 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn" podStartSLOduration=4.046567436 podStartE2EDuration="45.9807415s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.724713719 +0000 UTC m=+968.051427457" lastFinishedPulling="2026-02-16 13:20:10.658887783 +0000 UTC m=+1009.985601521" observedRunningTime="2026-02-16 13:20:11.977471081 +0000 UTC m=+1011.304184809" watchObservedRunningTime="2026-02-16 13:20:11.9807415 +0000 UTC m=+1011.307455228" Feb 16 13:20:11 crc kubenswrapper[4816]: I0216 13:20:11.985892 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" podStartSLOduration=3.518858002 podStartE2EDuration="45.985876109s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.180798436 +0000 UTC m=+967.507512164" lastFinishedPulling="2026-02-16 13:20:10.647816523 +0000 UTC m=+1009.974530271" observedRunningTime="2026-02-16 13:20:11.950992491 
+0000 UTC m=+1011.277706219" watchObservedRunningTime="2026-02-16 13:20:11.985876109 +0000 UTC m=+1011.312589837"
Feb 16 13:20:14 crc kubenswrapper[4816]: I0216 13:20:14.317946 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" podUID="ee88d6f1-148e-4a18-ae88-4bdda1df4d65" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.83:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 13:20:14 crc kubenswrapper[4816]: I0216 13:20:14.318947 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-5cd688d8fc-w74kn"
Feb 16 13:20:14 crc kubenswrapper[4816]: I0216 13:20:14.347399 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj" podStartSLOduration=6.212576743 podStartE2EDuration="48.347383351s" podCreationTimestamp="2026-02-16 13:19:26 +0000 UTC" firstStartedPulling="2026-02-16 13:19:28.519405628 +0000 UTC m=+967.846119356" lastFinishedPulling="2026-02-16 13:20:10.654212236 +0000 UTC m=+1009.980925964" observedRunningTime="2026-02-16 13:20:12.150234736 +0000 UTC m=+1011.476948464" watchObservedRunningTime="2026-02-16 13:20:14.347383351 +0000 UTC m=+1013.674097079"
Feb 16 13:20:16 crc kubenswrapper[4816]: I0216 13:20:16.935568 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-5d946d989d-zgztq"
Feb 16 13:20:17 crc kubenswrapper[4816]: I0216 13:20:17.028624 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987464f4-2qsmj"
Feb 16 13:20:17 crc kubenswrapper[4816]: I0216 13:20:17.063480 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-69f8888797-4lc7x"
Feb 16 13:20:17 crc kubenswrapper[4816]: I0216 13:20:17.188148 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-b4d948c87-pxkc2"
Feb 16 13:20:17 crc kubenswrapper[4816]: I0216 13:20:17.216695 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-54f6768c69-5fh52"
Feb 16 13:20:17 crc kubenswrapper[4816]: I0216 13:20:17.463267 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-68f46476f-hpwxn"
Feb 16 13:20:17 crc kubenswrapper[4816]: I0216 13:20:17.983816 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" podUID="de3a103e-4d92-4cc1-a7c2-5ab7c14da448" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.79:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 13:20:22 crc kubenswrapper[4816]: I0216 13:20:22.541755 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79d975b745-rx9cb"
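
The prober.go entries above show what a kubelet HTTP probe amounts to: a GET against the container's endpoint with a short per-attempt timeout, where any transport error or a status outside 2xx/3xx counts as failure and feeds the "SyncLoop (probe)" status transitions. A self-contained sketch in that spirit, not the kubelet implementation; the URL is the failing mariadb readyz endpoint from the log, and the 1-second timeout is the probe default, assumed rather than read from the pod spec:

// probe_sketch.go - illustrative HTTP readiness check.
package main

import (
    "context"
    "fmt"
    "net/http"
    "time"
)

func probe(url string) error {
    ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    defer cancel()
    req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
    if err != nil {
        return err
    }
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        // Both failure modes in the log surface here: "connect: connection
        // refused" and "context deadline exceeded (Client.Timeout ...)".
        return err
    }
    defer resp.Body.Close()
    // The kubelet treats 2xx and 3xx responses as success for HTTP probes.
    if resp.StatusCode < 200 || resp.StatusCode >= 400 {
        return fmt.Errorf("probe failed: status %d", resp.StatusCode)
    }
    return nil
}

func main() {
    fmt.Println(probe("http://10.217.0.79:8081/readyz"))
}

The "context deadline exceeded" failures for the baremetal and mariadb operators above are transient: both pods flip to status="ready" a few seconds later in the log.
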
pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" Feb 16 13:20:26 crc kubenswrapper[4816]: I0216 13:20:26.943844 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6994f66f48-ssbf6" Feb 16 13:20:36 crc kubenswrapper[4816]: I0216 13:20:36.941321 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:20:36 crc kubenswrapper[4816]: I0216 13:20:36.942023 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.470908 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7lw8q"] Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.472351 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.475842 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7lw8q"] Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.476038 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.476328 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.476502 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-fgzfz" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.476748 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.532183 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-w4v82"] Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.533606 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.542075 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.544493 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-w4v82"] Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.567471 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.567588 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/114a65e3-b4be-48b0-804e-516e7ee4a871-config\") pod \"dnsmasq-dns-675f4bcbfc-7lw8q\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.567612 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-config\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.567635 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwxzk\" (UniqueName: \"kubernetes.io/projected/e49e4328-5261-4c8e-aac9-702dbc6a8125-kube-api-access-wwxzk\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.567712 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cv2n\" (UniqueName: \"kubernetes.io/projected/114a65e3-b4be-48b0-804e-516e7ee4a871-kube-api-access-5cv2n\") pod \"dnsmasq-dns-675f4bcbfc-7lw8q\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.669344 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.670004 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/114a65e3-b4be-48b0-804e-516e7ee4a871-config\") pod \"dnsmasq-dns-675f4bcbfc-7lw8q\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.670133 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-config\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:20:45 crc 
kubenswrapper[4816]: I0216 13:20:45.670264 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwxzk\" (UniqueName: \"kubernetes.io/projected/e49e4328-5261-4c8e-aac9-702dbc6a8125-kube-api-access-wwxzk\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82"
Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.670386 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cv2n\" (UniqueName: \"kubernetes.io/projected/114a65e3-b4be-48b0-804e-516e7ee4a871-kube-api-access-5cv2n\") pod \"dnsmasq-dns-675f4bcbfc-7lw8q\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q"
Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.670280 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82"
Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.670853 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-config\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82"
Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.671734 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/114a65e3-b4be-48b0-804e-516e7ee4a871-config\") pod \"dnsmasq-dns-675f4bcbfc-7lw8q\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q"
Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.690586 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cv2n\" (UniqueName: \"kubernetes.io/projected/114a65e3-b4be-48b0-804e-516e7ee4a871-kube-api-access-5cv2n\") pod \"dnsmasq-dns-675f4bcbfc-7lw8q\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q"
Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.690589 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwxzk\" (UniqueName: \"kubernetes.io/projected/e49e4328-5261-4c8e-aac9-702dbc6a8125-kube-api-access-wwxzk\") pod \"dnsmasq-dns-78dd6ddcc-w4v82\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82"
Feb 16 13:20:45 crc kubenswrapper[4816]: I0216 13:20:45.789353 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q"
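
The reconciler_common.go / operation_generator.go sequence above is the kubelet volume manager's reconcile loop: each volume in the desired state of the world first passes VerifyControllerAttachedVolume (trivial for configmap, secret, projected and local plugins, which need no controller attach), then MountVolume, whose SetUp materializes the volume under /var/lib/kubelet/pods/<UID>/volumes/ and records it in the actual state of the world. A compressed, illustrative sketch of that pattern; the names are hypothetical, not the kubelet's own:

// reconcile_sketch.go - illustrative model of the desired-state vs.
// actual-state loop traced by the log lines above.
package main

import "fmt"

type volume struct{ name, pod string }

func reconcile(desired []volume, mounted map[string]bool) {
    for _, v := range desired {
        key := v.pod + "/" + v.name
        if mounted[key] {
            continue // already in the actual state of the world
        }
        // Step 1: verification succeeds immediately for volume types that
        // need no controller attach (configmap, secret, projected, local).
        fmt.Printf("VerifyControllerAttachedVolume started for volume %q pod %q\n", v.name, v.pod)
        // Step 2: the plugin materializes the volume on disk and the
        // operation generator marks it mounted.
        fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.name, v.pod)
        mounted[key] = true
    }
}

func main() {
    pod := "dnsmasq-dns-78dd6ddcc-w4v82"
    reconcile([]volume{{"dns-svc", pod}, {"config", pod}, {"kube-api-access-wwxzk", pod}},
        map[string]bool{})
}
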
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.215567 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-w4v82"] Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.338156 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7lw8q"] Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.368903 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" event={"ID":"e49e4328-5261-4c8e-aac9-702dbc6a8125","Type":"ContainerStarted","Data":"572f5ce3f0d7a882dfaf6ed546b867a68d145481fdc364ad9c6bd3d1aa62c9ea"} Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.387948 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-65qn9"] Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.389065 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-65qn9" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.402088 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-65qn9"] Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.415759 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7lw8q"] Feb 16 13:20:46 crc kubenswrapper[4816]: W0216 13:20:46.456629 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod114a65e3_b4be_48b0_804e_516e7ee4a871.slice/crio-591d25b4df6a051136ac8def25541612331dae8407dc69f08d95b811714d327b WatchSource:0}: Error finding container 591d25b4df6a051136ac8def25541612331dae8407dc69f08d95b811714d327b: Status 404 returned error can't find the container with id 591d25b4df6a051136ac8def25541612331dae8407dc69f08d95b811714d327b Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.487295 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdk6f\" (UniqueName: \"kubernetes.io/projected/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-kube-api-access-sdk6f\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.487352 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-dns-svc\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.487370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-config\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.588299 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdk6f\" (UniqueName: \"kubernetes.io/projected/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-kube-api-access-sdk6f\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9" Feb 16 13:20:46 crc 
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.588351 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-dns-svc\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9"
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.588376 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-config\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9"
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.589272 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-config\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9"
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.589561 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-dns-svc\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9"
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.623862 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdk6f\" (UniqueName: \"kubernetes.io/projected/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-kube-api-access-sdk6f\") pod \"dnsmasq-dns-666b6646f7-65qn9\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " pod="openstack/dnsmasq-dns-666b6646f7-65qn9"
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.716746 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-w4v82"]
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.729569 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-65qn9"
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.733327 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-pxlch"]
Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.739726 4816 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.767355 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-pxlch"] Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.791354 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.791399 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-config\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.791440 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6x7p\" (UniqueName: \"kubernetes.io/projected/16f3d17e-33c1-4155-822f-a21017cba883-kube-api-access-h6x7p\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.892233 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.892286 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-config\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.892329 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6x7p\" (UniqueName: \"kubernetes.io/projected/16f3d17e-33c1-4155-822f-a21017cba883-kube-api-access-h6x7p\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.893488 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.894126 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-config\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:46 crc kubenswrapper[4816]: I0216 13:20:46.922287 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6x7p\" (UniqueName: 
\"kubernetes.io/projected/16f3d17e-33c1-4155-822f-a21017cba883-kube-api-access-h6x7p\") pod \"dnsmasq-dns-57d769cc4f-pxlch\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.095864 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.379445 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-65qn9"] Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.389574 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" event={"ID":"114a65e3-b4be-48b0-804e-516e7ee4a871","Type":"ContainerStarted","Data":"591d25b4df6a051136ac8def25541612331dae8407dc69f08d95b811714d327b"} Feb 16 13:20:47 crc kubenswrapper[4816]: W0216 13:20:47.393145 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c5c27a_8fd9_4218_bb63_2703f52b21fb.slice/crio-6e7950cabbbbd74aa81ba40cae963e51dbf08f307185611a5840ea9037bc61f3 WatchSource:0}: Error finding container 6e7950cabbbbd74aa81ba40cae963e51dbf08f307185611a5840ea9037bc61f3: Status 404 returned error can't find the container with id 6e7950cabbbbd74aa81ba40cae963e51dbf08f307185611a5840ea9037bc61f3 Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.556109 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.558481 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.562118 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.562298 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.562419 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.562493 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-m9hmn" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.562694 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.562756 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.567301 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.569823 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.608913 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.608974 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.608998 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9eb39773-46a3-4f31-a95a-64a183dbe417-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.609026 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.609066 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9eb39773-46a3-4f31-a95a-64a183dbe417-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.609086 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.609118 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.609164 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72r48\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-kube-api-access-72r48\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.609317 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.610261 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.610335 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.713847 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.713894 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.713926 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9eb39773-46a3-4f31-a95a-64a183dbe417-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.713949 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.713976 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9eb39773-46a3-4f31-a95a-64a183dbe417-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.714005 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.714041 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.714094 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72r48\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-kube-api-access-72r48\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.714124 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc 
kubenswrapper[4816]: I0216 13:20:47.714173 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.714208 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.714985 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.715087 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.715221 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.721714 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.722043 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.722768 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9eb39773-46a3-4f31-a95a-64a183dbe417-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.723396 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0"
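
The MountVolume.MountDevice line above succeeds with device mount path "/mnt/openstack/pv09" because for a local PersistentVolume the "device" is simply a host directory, which is then bind-mounted into the pod. A sketch of the kind of PV object behind local-storage09-crc, built with k8s.io/api types; every field except the volume name and path is an assumption, not read from this cluster:

// local_pv_sketch.go - illustrative local PersistentVolume object.
package main

import (
    "fmt"

    corev1 "k8s.io/api/core/v1"
    "k8s.io/apimachinery/pkg/api/resource"
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
    pv := corev1.PersistentVolume{
        ObjectMeta: metav1.ObjectMeta{Name: "local-storage09-crc"},
        Spec: corev1.PersistentVolumeSpec{
            Capacity: corev1.ResourceList{
                corev1.ResourceStorage: resource.MustParse("10Gi"), // assumed size
            },
            AccessModes: []corev1.PersistentVolumeAccessMode{corev1.ReadWriteOnce},
            PersistentVolumeSource: corev1.PersistentVolumeSource{
                Local: &corev1.LocalVolumeSource{Path: "/mnt/openstack/pv09"},
            },
            // Local PVs must pin the node that owns the host path.
            NodeAffinity: &corev1.VolumeNodeAffinity{
                Required: &corev1.NodeSelector{
                    NodeSelectorTerms: []corev1.NodeSelectorTerm{{
                        MatchExpressions: []corev1.NodeSelectorRequirement{{
                            Key:      "kubernetes.io/hostname",
                            Operator: corev1.NodeSelectorOpIn,
                            Values:   []string{"crc"},
                        }},
                    }},
                },
            },
        },
    }
    fmt.Println(pv.Name, "->", pv.Spec.Local.Path)
}
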
\"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.730300 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9eb39773-46a3-4f31-a95a-64a183dbe417-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.745033 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.747862 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72r48\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-kube-api-access-72r48\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.789250 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " pod="openstack/rabbitmq-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.792928 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-pxlch"] Feb 16 13:20:47 crc kubenswrapper[4816]: W0216 13:20:47.795475 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16f3d17e_33c1_4155_822f_a21017cba883.slice/crio-ddbd8a74b0ba7c4745efd880f52357554e15c5f1eef8fe8bc138606fddb66ca5 WatchSource:0}: Error finding container ddbd8a74b0ba7c4745efd880f52357554e15c5f1eef8fe8bc138606fddb66ca5: Status 404 returned error can't find the container with id ddbd8a74b0ba7c4745efd880f52357554e15c5f1eef8fe8bc138606fddb66ca5 Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.880760 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.882124 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.886405 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.886534 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.886647 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.887216 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-kr7td" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.887337 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.888755 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.898834 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.918632 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.919185 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.919232 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7wh5\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-kube-api-access-v7wh5\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.919262 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.919307 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.919354 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfcee51-c740-477a-87d9-558fffc58686-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.919382 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.921377 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.921448 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.921466 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.921509 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.921562 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfcee51-c740-477a-87d9-558fffc58686-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:47 crc kubenswrapper[4816]: I0216 13:20:47.928441 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.302773 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfcee51-c740-477a-87d9-558fffc58686-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303048 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303100 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303145 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303164 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303191 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303226 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfcee51-c740-477a-87d9-558fffc58686-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303262 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303289 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7wh5\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-kube-api-access-v7wh5\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303318 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.303358 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.304770 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.308318 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.309164 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.309460 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.311504 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.317158 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.317805 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.320456 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfcee51-c740-477a-87d9-558fffc58686-pod-info\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.322760 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfcee51-c740-477a-87d9-558fffc58686-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.323263 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.332455 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7wh5\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-kube-api-access-v7wh5\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.349965 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.460371 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" event={"ID":"16f3d17e-33c1-4155-822f-a21017cba883","Type":"ContainerStarted","Data":"ddbd8a74b0ba7c4745efd880f52357554e15c5f1eef8fe8bc138606fddb66ca5"} Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.463517 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-65qn9" event={"ID":"e9c5c27a-8fd9-4218-bb63-2703f52b21fb","Type":"ContainerStarted","Data":"6e7950cabbbbd74aa81ba40cae963e51dbf08f307185611a5840ea9037bc61f3"} Feb 16 13:20:48 crc kubenswrapper[4816]: I0216 13:20:48.522561 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.023414 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.035033 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.039499 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-szgkj" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.040382 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.043050 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.043324 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.049518 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.050559 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143208 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143258 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143297 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143330 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143367 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143537 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c7fl\" (UniqueName: \"kubernetes.io/projected/4a9428a1-a54a-4e85-b898-1eac97438ba8-kube-api-access-9c7fl\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143601 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.143627 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.148714 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.244996 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.245360 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.245399 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.245433 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.245452 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c7fl\" (UniqueName: \"kubernetes.io/projected/4a9428a1-a54a-4e85-b898-1eac97438ba8-kube-api-access-9c7fl\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.245467 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.245482 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.246000 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.246041 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.246293 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.246574 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.247018 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.247223 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.250990 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.268814 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.275102 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c7fl\" (UniqueName: \"kubernetes.io/projected/4a9428a1-a54a-4e85-b898-1eac97438ba8-kube-api-access-9c7fl\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.281015 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.389896 
4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.485604 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9eb39773-46a3-4f31-a95a-64a183dbe417","Type":"ContainerStarted","Data":"5e52400a84762e69c935f28ca297da68c40d2dc0bddca8c2b4ddac7267c9dd35"} Feb 16 13:20:49 crc kubenswrapper[4816]: I0216 13:20:49.781567 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.155512 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.516490 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfcee51-c740-477a-87d9-558fffc58686","Type":"ContainerStarted","Data":"0531cfe15c23253e9c798537b38b9f448a51443d71b2b9b500234d577f77b5ff"} Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.521823 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a9428a1-a54a-4e85-b898-1eac97438ba8","Type":"ContainerStarted","Data":"d243137c22cc21b5b80520bb6757b21be89545b973f84d2df17a484e06e87be6"} Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.972408 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.977613 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.984311 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-j7d7z" Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.984990 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.987420 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 16 13:20:50 crc kubenswrapper[4816]: I0216 13:20:50.994853 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.052377 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241142 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxhl2\" (UniqueName: \"kubernetes.io/projected/8cdb34b1-893f-4701-89b2-195db5c6c03b-kube-api-access-hxhl2\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241201 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241232 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" 
(UniqueName: \"kubernetes.io/empty-dir/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241258 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241278 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241337 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241359 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.241390 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.342687 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxhl2\" (UniqueName: \"kubernetes.io/projected/8cdb34b1-893f-4701-89b2-195db5c6c03b-kube-api-access-hxhl2\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.342790 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.342825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.342862 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.342912 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.342937 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.342965 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.343006 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.344167 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.351075 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.352416 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.354408 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.357528 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-operator-scripts\") pod 
\"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.372211 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxhl2\" (UniqueName: \"kubernetes.io/projected/8cdb34b1-893f-4701-89b2-195db5c6c03b-kube-api-access-hxhl2\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.374351 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.384635 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.387771 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.393140 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.400601 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-ml7rf" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.400913 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.401114 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.439041 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.546536 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-config-data\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.546596 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.546696 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs9bj\" (UniqueName: \"kubernetes.io/projected/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kube-api-access-rs9bj\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " 
pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.548281 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kolla-config\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.548317 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.550001 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.603260 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.650009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kolla-config\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.650067 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.650147 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-config-data\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.650178 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.650214 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs9bj\" (UniqueName: \"kubernetes.io/projected/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kube-api-access-rs9bj\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.654775 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kolla-config\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.663018 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-config-data\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " 
pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.663584 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.671898 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs9bj\" (UniqueName: \"kubernetes.io/projected/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kube-api-access-rs9bj\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.672599 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " pod="openstack/memcached-0" Feb 16 13:20:51 crc kubenswrapper[4816]: I0216 13:20:51.808346 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 16 13:20:52 crc kubenswrapper[4816]: I0216 13:20:52.223541 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 13:20:52 crc kubenswrapper[4816]: I0216 13:20:52.580129 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8cdb34b1-893f-4701-89b2-195db5c6c03b","Type":"ContainerStarted","Data":"2b6455348f4b61d063a9f3c519fcce891590ef1ac0f4dc572d0e2ce5be92cb8f"} Feb 16 13:20:52 crc kubenswrapper[4816]: I0216 13:20:52.834227 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.209447 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.210627 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.308697 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-qfrdk" Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.460287 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbnc4\" (UniqueName: \"kubernetes.io/projected/ccaf33e6-b7e7-42b7-9ab5-dea152b2853f-kube-api-access-jbnc4\") pod \"kube-state-metrics-0\" (UID: \"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f\") " pod="openstack/kube-state-metrics-0" Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.575252 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbnc4\" (UniqueName: \"kubernetes.io/projected/ccaf33e6-b7e7-42b7-9ab5-dea152b2853f-kube-api-access-jbnc4\") pod \"kube-state-metrics-0\" (UID: \"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f\") " pod="openstack/kube-state-metrics-0" Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.681251 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.767407 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb","Type":"ContainerStarted","Data":"c0b62e380f7cfbbdde6b3f9d9d08896bafb0952a9e6287bad8e11a6560834734"} Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.805477 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbnc4\" (UniqueName: \"kubernetes.io/projected/ccaf33e6-b7e7-42b7-9ab5-dea152b2853f-kube-api-access-jbnc4\") pod \"kube-state-metrics-0\" (UID: \"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f\") " pod="openstack/kube-state-metrics-0" Feb 16 13:20:53 crc kubenswrapper[4816]: I0216 13:20:53.979692 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:20:55 crc kubenswrapper[4816]: I0216 13:20:55.377676 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:20:55 crc kubenswrapper[4816]: I0216 13:20:55.804019 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f","Type":"ContainerStarted","Data":"eff8effdcc3f74fcdbe83e242ead2381ef59e69e85f7c85622fce7b8c76e3489"} Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.661167 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-v9w6q"] Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.662185 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.665788 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vjxgj" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.667342 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.667461 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.683633 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-rfd9r"] Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.685062 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.702797 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v9w6q"] Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.708800 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.718249 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rfd9r"] Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.718356 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.757506 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.761699 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.762044 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.762086 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.762225 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.762378 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-nxjjh" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817116 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817159 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-log-ovn\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817184 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-scripts\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817200 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817215 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-scripts\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817234 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfh74\" (UniqueName: \"kubernetes.io/projected/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-kube-api-access-zfh74\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817267 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817294 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-lib\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817469 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-combined-ca-bundle\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817495 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-etc-ovs\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817511 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-log\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817554 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzwqc\" (UniqueName: 
\"kubernetes.io/projected/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-kube-api-access-jzwqc\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817569 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run-ovn\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817591 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-config\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817605 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817628 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817645 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-ovn-controller-tls-certs\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817676 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-run\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817731 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x2kt\" (UniqueName: \"kubernetes.io/projected/7d49f257-3900-43a9-b4c2-353ceeeeea88-kube-api-access-5x2kt\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817755 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.817790 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.918912 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x2kt\" (UniqueName: \"kubernetes.io/projected/7d49f257-3900-43a9-b4c2-353ceeeeea88-kube-api-access-5x2kt\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.918960 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.918995 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919019 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919037 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-log-ovn\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919057 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-scripts\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919072 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-scripts\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919086 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919103 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfh74\" (UniqueName: \"kubernetes.io/projected/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-kube-api-access-zfh74\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919126 
4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919149 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-lib\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919163 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-combined-ca-bundle\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919187 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-etc-ovs\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919200 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-log\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919225 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzwqc\" (UniqueName: \"kubernetes.io/projected/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-kube-api-access-jzwqc\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919239 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run-ovn\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919258 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-config\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919274 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919295 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-scripts\") pod \"ovsdbserver-sb-0\" (UID: 
\"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919312 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-ovn-controller-tls-certs\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.919327 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-run\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.920204 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.920275 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-lib\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.920374 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-run\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.920631 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-log-ovn\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.921249 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run-ovn\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.922890 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-scripts\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.923200 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.925429 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: 
\"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-log\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.925555 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-scripts\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.927190 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.921412 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-etc-ovs\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.932579 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.934232 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.940281 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.945674 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x2kt\" (UniqueName: \"kubernetes.io/projected/7d49f257-3900-43a9-b4c2-353ceeeeea88-kube-api-access-5x2kt\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.963924 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfh74\" (UniqueName: \"kubernetes.io/projected/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-kube-api-access-zfh74\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.970890 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.975159 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-combined-ca-bundle\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.981512 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzwqc\" (UniqueName: \"kubernetes.io/projected/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-kube-api-access-jzwqc\") pod \"ovn-controller-ovs-rfd9r\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") " pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.984063 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-config\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.986435 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-ovn-controller-tls-certs\") pod \"ovn-controller-v9w6q\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:56 crc kubenswrapper[4816]: I0216 13:20:56.989417 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:57 crc kubenswrapper[4816]: I0216 13:20:57.003573 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v9w6q" Feb 16 13:20:57 crc kubenswrapper[4816]: I0216 13:20:57.062891 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:20:57 crc kubenswrapper[4816]: I0216 13:20:57.094742 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 16 13:20:57 crc kubenswrapper[4816]: I0216 13:20:57.862618 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v9w6q"] Feb 16 13:20:57 crc kubenswrapper[4816]: W0216 13:20:57.876795 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9c2b7b7_d48b_41e5_9591_5b5470cfca1e.slice/crio-99f7dbff425ca678b68c841d68edab0cb2552f423d0e4f16f7d69943fea9e9e3 WatchSource:0}: Error finding container 99f7dbff425ca678b68c841d68edab0cb2552f423d0e4f16f7d69943fea9e9e3: Status 404 returned error can't find the container with id 99f7dbff425ca678b68c841d68edab0cb2552f423d0e4f16f7d69943fea9e9e3 Feb 16 13:20:58 crc kubenswrapper[4816]: I0216 13:20:58.258311 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 13:20:58 crc kubenswrapper[4816]: W0216 13:20:58.270487 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d49f257_3900_43a9_b4c2_353ceeeeea88.slice/crio-2746e3a85e9e93efbb0bef5286b51102cb12a12dcef40dcc18a11315f4192f5e WatchSource:0}: Error finding container 2746e3a85e9e93efbb0bef5286b51102cb12a12dcef40dcc18a11315f4192f5e: Status 404 returned error can't find the container with id 2746e3a85e9e93efbb0bef5286b51102cb12a12dcef40dcc18a11315f4192f5e Feb 16 13:20:58 crc kubenswrapper[4816]: I0216 13:20:58.444150 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rfd9r"] Feb 16 13:20:58 crc kubenswrapper[4816]: W0216 13:20:58.453256 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4ac4b8a_a945_4f89_9ae4_933ab04dce2b.slice/crio-8790e8895d3624d6c28689d4fe219054db76d82b207424cf0fd1b1cb8276b9ff WatchSource:0}: Error finding container 8790e8895d3624d6c28689d4fe219054db76d82b207424cf0fd1b1cb8276b9ff: Status 404 returned error can't find the container with id 8790e8895d3624d6c28689d4fe219054db76d82b207424cf0fd1b1cb8276b9ff Feb 16 13:20:58 crc kubenswrapper[4816]: I0216 13:20:58.856454 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q" event={"ID":"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e","Type":"ContainerStarted","Data":"99f7dbff425ca678b68c841d68edab0cb2552f423d0e4f16f7d69943fea9e9e3"} Feb 16 13:20:58 crc kubenswrapper[4816]: I0216 13:20:58.858133 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rfd9r" event={"ID":"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b","Type":"ContainerStarted","Data":"8790e8895d3624d6c28689d4fe219054db76d82b207424cf0fd1b1cb8276b9ff"} Feb 16 13:20:58 crc kubenswrapper[4816]: I0216 13:20:58.859494 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7d49f257-3900-43a9-b4c2-353ceeeeea88","Type":"ContainerStarted","Data":"2746e3a85e9e93efbb0bef5286b51102cb12a12dcef40dcc18a11315f4192f5e"} Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.022865 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-qxznq"] Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.023879 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.025198 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.028638 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qxznq"] Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.092751 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-combined-ca-bundle\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.092828 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls9k7\" (UniqueName: \"kubernetes.io/projected/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-kube-api-access-ls9k7\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.092870 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.093015 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.093107 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovn-rundir\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.093200 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovs-rundir\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.194586 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovn-rundir\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.194686 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovs-rundir\") pod \"ovn-controller-metrics-qxznq\" (UID: 
\"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.194919 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-combined-ca-bundle\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.194943 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls9k7\" (UniqueName: \"kubernetes.io/projected/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-kube-api-access-ls9k7\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.194969 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.195026 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.207280 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovs-rundir\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.207484 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovn-rundir\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.210697 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.225020 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-combined-ca-bundle\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.226439 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls9k7\" (UniqueName: \"kubernetes.io/projected/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-kube-api-access-ls9k7\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" 
Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.240680 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qxznq\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.378097 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.439145 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-pxlch"] Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.468674 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xcsds"] Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.472294 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.476763 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.488792 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xcsds"] Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.611158 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-config\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.611220 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.611255 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6fmq\" (UniqueName: \"kubernetes.io/projected/f8ae7652-9970-431b-9eb8-69b03f1ba522-kube-api-access-p6fmq\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.611761 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.713022 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.713079 4816 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-p6fmq\" (UniqueName: \"kubernetes.io/projected/f8ae7652-9970-431b-9eb8-69b03f1ba522-kube-api-access-p6fmq\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.713195 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.713220 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-config\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.713938 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.714004 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-config\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.714289 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.775952 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6fmq\" (UniqueName: \"kubernetes.io/projected/f8ae7652-9970-431b-9eb8-69b03f1ba522-kube-api-access-p6fmq\") pod \"dnsmasq-dns-6bc7876d45-xcsds\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:20:59 crc kubenswrapper[4816]: I0216 13:20:59.809054 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.151057 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.152566 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.156304 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-bc6lg" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.156517 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.172385 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.173955 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.186289 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338108 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338167 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338193 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtltz\" (UniqueName: \"kubernetes.io/projected/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-kube-api-access-rtltz\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338221 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338280 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338313 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-config\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338345 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: 
\"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.338384 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444593 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444682 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-config\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444721 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444763 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444891 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444920 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444937 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtltz\" (UniqueName: \"kubernetes.io/projected/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-kube-api-access-rtltz\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.444962 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.445231 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.445493 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.446426 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.448697 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-config\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.458206 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.459435 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.470670 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtltz\" (UniqueName: \"kubernetes.io/projected/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-kube-api-access-rtltz\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.470950 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.477237 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:00 crc kubenswrapper[4816]: I0216 13:21:00.511458 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:06 crc kubenswrapper[4816]: I0216 13:21:06.941277 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:21:06 crc kubenswrapper[4816]: I0216 13:21:06.941872 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:21:06 crc kubenswrapper[4816]: I0216 13:21:06.941914 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:21:06 crc kubenswrapper[4816]: I0216 13:21:06.942531 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"64ab67741e223081f84c6d63a99c0d895038e507375b2c1f1a0cf120b6972be0"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:21:06 crc kubenswrapper[4816]: I0216 13:21:06.942575 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://64ab67741e223081f84c6d63a99c0d895038e507375b2c1f1a0cf120b6972be0" gracePeriod=600 Feb 16 13:21:08 crc kubenswrapper[4816]: I0216 13:21:08.012470 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="64ab67741e223081f84c6d63a99c0d895038e507375b2c1f1a0cf120b6972be0" exitCode=0 Feb 16 13:21:08 crc kubenswrapper[4816]: I0216 13:21:08.012547 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"64ab67741e223081f84c6d63a99c0d895038e507375b2c1f1a0cf120b6972be0"} Feb 16 13:21:08 crc kubenswrapper[4816]: I0216 13:21:08.013339 4816 scope.go:117] "RemoveContainer" containerID="7e0981cd562f683639a286c8a9849e9acb7985787e5b7fb344492cda47873ec7" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.067961 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.068484 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 
/var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-72r48,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(9eb39773-46a3-4f31-a95a-64a183dbe417): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.069762 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.183564 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.184163 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 
/var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v7wh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(ecfcee51-c740-477a-87d9-558fffc58686): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.185876 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="ecfcee51-c740-477a-87d9-558fffc58686" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.347627 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="ecfcee51-c740-477a-87d9-558fffc58686" Feb 16 13:21:15 crc kubenswrapper[4816]: E0216 13:21:15.348733 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" Feb 16 13:21:21 crc kubenswrapper[4816]: E0216 13:21:21.490715 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = 
Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Feb 16 13:21:21 crc kubenswrapper[4816]: E0216 13:21:21.492619 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n5f8h59bhbchb4h69h648h5d8hc4h89h58fh66ch9fh9ch558h5f8h84h55fhd7h57hdh54fh58fh89hd7h56bh5dbh556h65bh7fhb5h547h548q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rs9bj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(35fffc20-e4dc-43ad-8a7f-64da2e1ceebb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:21 crc 
kubenswrapper[4816]: E0216 13:21:21.493956 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" Feb 16 13:21:22 crc kubenswrapper[4816]: E0216 13:21:22.473061 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.078863 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.079009 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5b6h79h65dhc6h5fdh599h68hd8hfdh6fh66h59ch57h645hfhb4h554h679h7fh657h65bh5hddh674h9ch5cch687hc9h668h55dh65fh78q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jzwqc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-rfd9r_openstack(f4ac4b8a-a945-4f89-9ae4-933ab04dce2b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.080164 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"rpc error: code = 
Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.481271 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified\\\"\"" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.856778 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.857329 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h6x7p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-pxlch_openstack(16f3d17e-33c1-4155-822f-a21017cba883): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.858529 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" 
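
The entries above repeat one failure pattern for several pods: the CRI image pull is aborted with "rpc error: code = Canceled desc = copying config: context canceled", kuberuntime_manager dumps the full container spec under ErrImagePull, and on the next sync the container sits in ImagePullBackOff. A minimal sketch (the helper name is mine; the reason strings are the ones visible in these entries) of surfacing such containers from pod status with the Kubernetes Go API types:

    package podstatus

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    // waitingOnImagePull lists containers of a pod stuck waiting on an image
    // pull, matching the Waiting reasons seen in the log entries above
    // (ErrImagePull, ImagePullBackOff). Init containers are included because
    // the ovsdb-server-init and dnsmasq "init" containers fail the same way.
    func waitingOnImagePull(pod *corev1.Pod) []string {
        statuses := append([]corev1.ContainerStatus{}, pod.Status.InitContainerStatuses...)
        statuses = append(statuses, pod.Status.ContainerStatuses...)
        var stuck []string
        for _, cs := range statuses {
            if w := cs.State.Waiting; w != nil &&
                (w.Reason == "ErrImagePull" || w.Reason == "ImagePullBackOff") {
                stuck = append(stuck, fmt.Sprintf("%s: %s: %s", cs.Name, w.Reason, w.Message))
            }
        }
        return stuck
    }
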
podUID="16f3d17e-33c1-4155-822f-a21017cba883" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.865623 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.865783 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sdk6f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-65qn9_openstack(e9c5c27a-8fd9-4218-bb63-2703f52b21fb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.866987 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-65qn9" podUID="e9c5c27a-8fd9-4218-bb63-2703f52b21fb" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.940352 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.940541 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wwxzk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-w4v82_openstack(e49e4328-5261-4c8e-aac9-702dbc6a8125): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.941725 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" podUID="e49e4328-5261-4c8e-aac9-702dbc6a8125" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.942783 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.942902 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5cv2n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-7lw8q_openstack(114a65e3-b4be-48b0-804e-516e7ee4a871): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:21:23 crc kubenswrapper[4816]: E0216 13:21:23.944176 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" podUID="114a65e3-b4be-48b0-804e-516e7ee4a871" Feb 16 13:21:24 crc kubenswrapper[4816]: E0216 13:21:24.491115 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-65qn9" podUID="e9c5c27a-8fd9-4218-bb63-2703f52b21fb" Feb 16 13:21:24 crc kubenswrapper[4816]: I0216 13:21:24.661801 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xcsds"] Feb 16 13:21:24 crc kubenswrapper[4816]: I0216 13:21:24.782505 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qxznq"] Feb 16 13:21:25 crc kubenswrapper[4816]: W0216 13:21:25.053825 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9f02af3_d5f6_4c8c_81b6_9889d79b0925.slice/crio-77f2cb4125f7b591ccbdd43677e57982b05d6d57b3f6c04b744457270b044c66 WatchSource:0}: Error finding container 77f2cb4125f7b591ccbdd43677e57982b05d6d57b3f6c04b744457270b044c66: Status 404 returned error can't find the container with id 77f2cb4125f7b591ccbdd43677e57982b05d6d57b3f6c04b744457270b044c66 Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.063140 4816 util.go:48] "No ready sandbox for pod can be found. 
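
Each of the four dnsmasq-dns pods above runs the same init container: bash invokes dnsmasq with the pod's full flag set plus a trailing --test, which makes dnsmasq parse its configuration and exit, so the pod only proceeds past init when the config is valid. The $(POD_IP) reference is expanded by the kubelet from the POD_IP env var (fieldRef status.podIP) declared in the same spec. Reconstructing that container from the spec dumped above (the function name is mine):

    package dnsinit

    import corev1 "k8s.io/api/core/v1"

    // configCheckInit rebuilds the dnsmasq init container logged above. The
    // trailing --test turns the invocation into a config syntax check.
    func configCheckInit(image string) corev1.Container {
        return corev1.Container{
            Name:    "init",
            Image:   image, // openstack-neutron-server:current-podified in the log
            Command: []string{"/bin/bash"},
            Args: []string{"-c",
                "dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d" +
                    " --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug" +
                    " --bind-interfaces --listen-address=$(POD_IP) --port 5353" +
                    " --log-facility=- --no-hosts --domain-needed --no-resolv" +
                    " --bogus-priv --log-queries --test"},
        }
    }
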
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.069771 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.072604 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183340 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-dns-svc\") pod \"e49e4328-5261-4c8e-aac9-702dbc6a8125\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183686 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-config\") pod \"16f3d17e-33c1-4155-822f-a21017cba883\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183729 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/114a65e3-b4be-48b0-804e-516e7ee4a871-config\") pod \"114a65e3-b4be-48b0-804e-516e7ee4a871\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183756 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwxzk\" (UniqueName: \"kubernetes.io/projected/e49e4328-5261-4c8e-aac9-702dbc6a8125-kube-api-access-wwxzk\") pod \"e49e4328-5261-4c8e-aac9-702dbc6a8125\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183864 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cv2n\" (UniqueName: \"kubernetes.io/projected/114a65e3-b4be-48b0-804e-516e7ee4a871-kube-api-access-5cv2n\") pod \"114a65e3-b4be-48b0-804e-516e7ee4a871\" (UID: \"114a65e3-b4be-48b0-804e-516e7ee4a871\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183894 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-config\") pod \"e49e4328-5261-4c8e-aac9-702dbc6a8125\" (UID: \"e49e4328-5261-4c8e-aac9-702dbc6a8125\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183973 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-dns-svc\") pod \"16f3d17e-33c1-4155-822f-a21017cba883\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.183994 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6x7p\" (UniqueName: \"kubernetes.io/projected/16f3d17e-33c1-4155-822f-a21017cba883-kube-api-access-h6x7p\") pod \"16f3d17e-33c1-4155-822f-a21017cba883\" (UID: \"16f3d17e-33c1-4155-822f-a21017cba883\") " Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.187379 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16f3d17e-33c1-4155-822f-a21017cba883-kube-api-access-h6x7p" (OuterVolumeSpecName: "kube-api-access-h6x7p") pod 
"16f3d17e-33c1-4155-822f-a21017cba883" (UID: "16f3d17e-33c1-4155-822f-a21017cba883"). InnerVolumeSpecName "kube-api-access-h6x7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.187590 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e49e4328-5261-4c8e-aac9-702dbc6a8125-kube-api-access-wwxzk" (OuterVolumeSpecName: "kube-api-access-wwxzk") pod "e49e4328-5261-4c8e-aac9-702dbc6a8125" (UID: "e49e4328-5261-4c8e-aac9-702dbc6a8125"). InnerVolumeSpecName "kube-api-access-wwxzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.188120 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e49e4328-5261-4c8e-aac9-702dbc6a8125" (UID: "e49e4328-5261-4c8e-aac9-702dbc6a8125"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.188538 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-config" (OuterVolumeSpecName: "config") pod "16f3d17e-33c1-4155-822f-a21017cba883" (UID: "16f3d17e-33c1-4155-822f-a21017cba883"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.189175 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/114a65e3-b4be-48b0-804e-516e7ee4a871-config" (OuterVolumeSpecName: "config") pod "114a65e3-b4be-48b0-804e-516e7ee4a871" (UID: "114a65e3-b4be-48b0-804e-516e7ee4a871"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.189474 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "16f3d17e-33c1-4155-822f-a21017cba883" (UID: "16f3d17e-33c1-4155-822f-a21017cba883"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.189552 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-config" (OuterVolumeSpecName: "config") pod "e49e4328-5261-4c8e-aac9-702dbc6a8125" (UID: "e49e4328-5261-4c8e-aac9-702dbc6a8125"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.189562 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/114a65e3-b4be-48b0-804e-516e7ee4a871-kube-api-access-5cv2n" (OuterVolumeSpecName: "kube-api-access-5cv2n") pod "114a65e3-b4be-48b0-804e-516e7ee4a871" (UID: "114a65e3-b4be-48b0-804e-516e7ee4a871"). InnerVolumeSpecName "kube-api-access-5cv2n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286216 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286251 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286263 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/114a65e3-b4be-48b0-804e-516e7ee4a871-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286275 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwxzk\" (UniqueName: \"kubernetes.io/projected/e49e4328-5261-4c8e-aac9-702dbc6a8125-kube-api-access-wwxzk\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286287 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cv2n\" (UniqueName: \"kubernetes.io/projected/114a65e3-b4be-48b0-804e-516e7ee4a871-kube-api-access-5cv2n\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286296 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e49e4328-5261-4c8e-aac9-702dbc6a8125-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286308 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16f3d17e-33c1-4155-822f-a21017cba883-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.286317 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6x7p\" (UniqueName: \"kubernetes.io/projected/16f3d17e-33c1-4155-822f-a21017cba883-kube-api-access-h6x7p\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.305392 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 13:21:25 crc kubenswrapper[4816]: W0216 13:21:25.445019 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e895c6d_3b17_40a3_a5b7_b1a2ce1b5279.slice/crio-c4d2d1e05b42fbf3dd6dd32ef929666c7cd1d05b0015769de940328c5daa4e25 WatchSource:0}: Error finding container c4d2d1e05b42fbf3dd6dd32ef929666c7cd1d05b0015769de940328c5daa4e25: Status 404 returned error can't find the container with id c4d2d1e05b42fbf3dd6dd32ef929666c7cd1d05b0015769de940328c5daa4e25 Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.498427 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qxznq" event={"ID":"e9f02af3-d5f6-4c8c-81b6-9889d79b0925","Type":"ContainerStarted","Data":"77f2cb4125f7b591ccbdd43677e57982b05d6d57b3f6c04b744457270b044c66"} Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.499859 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" event={"ID":"f8ae7652-9970-431b-9eb8-69b03f1ba522","Type":"ContainerStarted","Data":"f652c57e0dbcd42f27dd2cce7ac3b359cc6ca5f0e056864996a361dbcf359a71"} Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.501079 
4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" event={"ID":"16f3d17e-33c1-4155-822f-a21017cba883","Type":"ContainerDied","Data":"ddbd8a74b0ba7c4745efd880f52357554e15c5f1eef8fe8bc138606fddb66ca5"} Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.501090 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-pxlch" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.504386 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" event={"ID":"114a65e3-b4be-48b0-804e-516e7ee4a871","Type":"ContainerDied","Data":"591d25b4df6a051136ac8def25541612331dae8407dc69f08d95b811714d327b"} Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.504444 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7lw8q" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.511396 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"a16107fccce9c93e96a6d43d25ee1381b11a663b98df0e1296331b66fbfb375f"} Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.521017 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279","Type":"ContainerStarted","Data":"c4d2d1e05b42fbf3dd6dd32ef929666c7cd1d05b0015769de940328c5daa4e25"} Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.522714 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" event={"ID":"e49e4328-5261-4c8e-aac9-702dbc6a8125","Type":"ContainerDied","Data":"572f5ce3f0d7a882dfaf6ed546b867a68d145481fdc364ad9c6bd3d1aa62c9ea"} Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.522841 4816 util.go:48] "No ready sandbox for pod can be found. 
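
The ContainerDied events and "No ready sandbox" lines above, together with the SyncLoop DELETE/REMOVE entries just below, show the kubelet reacting to the old dnsmasq ReplicaSet pods being deleted through the API; their orphaned volume directories under /var/lib/kubelet/pods/<uid>/volumes are cleaned up a couple of seconds later. A hedged client-go sketch of observing those same API deletions from outside the node:

    package podwatch

    import (
        "context"
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/watch"
        "k8s.io/client-go/kubernetes"
    )

    // logPodDeletes prints a line for every pod deletion in the openstack
    // namespace -- the same API events that drive the kubelet's
    // "SyncLoop DELETE" entries above.
    func logPodDeletes(ctx context.Context, cs kubernetes.Interface) error {
        w, err := cs.CoreV1().Pods("openstack").Watch(ctx, metav1.ListOptions{})
        if err != nil {
            return err
        }
        defer w.Stop()
        for ev := range w.ResultChan() {
            if ev.Type != watch.Deleted {
                continue
            }
            pod := ev.Object.(*corev1.Pod)
            fmt.Printf("deleted: %s/%s uid=%s\n", pod.Namespace, pod.Name, pod.UID)
        }
        return nil
    }
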
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-w4v82" Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.595235 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-w4v82"] Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.618029 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-w4v82"] Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.633490 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-pxlch"] Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.641388 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-pxlch"] Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.654196 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7lw8q"] Feb 16 13:21:25 crc kubenswrapper[4816]: I0216 13:21:25.661221 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7lw8q"] Feb 16 13:21:26 crc kubenswrapper[4816]: I0216 13:21:26.532004 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8cdb34b1-893f-4701-89b2-195db5c6c03b","Type":"ContainerStarted","Data":"9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf"} Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.416125 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="114a65e3-b4be-48b0-804e-516e7ee4a871" path="/var/lib/kubelet/pods/114a65e3-b4be-48b0-804e-516e7ee4a871/volumes" Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.417328 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16f3d17e-33c1-4155-822f-a21017cba883" path="/var/lib/kubelet/pods/16f3d17e-33c1-4155-822f-a21017cba883/volumes" Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.417895 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e49e4328-5261-4c8e-aac9-702dbc6a8125" path="/var/lib/kubelet/pods/e49e4328-5261-4c8e-aac9-702dbc6a8125/volumes" Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.544599 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f","Type":"ContainerStarted","Data":"612e60441780268db4eb25969864048ff43689c774f46ef5ce5d5ba8b8ffe540"} Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.545161 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.547188 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7d49f257-3900-43a9-b4c2-353ceeeeea88","Type":"ContainerStarted","Data":"7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000"} Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.549521 4816 generic.go:334] "Generic (PLEG): container finished" podID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerID="8aaafd076d6fd5857354bae1f19fde54138f82bc388b3d4be9613ecb6ee3e39e" exitCode=0 Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.549594 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" event={"ID":"f8ae7652-9970-431b-9eb8-69b03f1ba522","Type":"ContainerDied","Data":"8aaafd076d6fd5857354bae1f19fde54138f82bc388b3d4be9613ecb6ee3e39e"} Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.551514 4816 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a9428a1-a54a-4e85-b898-1eac97438ba8","Type":"ContainerStarted","Data":"63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1"} Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.554328 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q" event={"ID":"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e","Type":"ContainerStarted","Data":"ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18"} Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.554394 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-v9w6q" Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.565835 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.610541811 podStartE2EDuration="34.565813557s" podCreationTimestamp="2026-02-16 13:20:53 +0000 UTC" firstStartedPulling="2026-02-16 13:20:55.50721863 +0000 UTC m=+1054.833932358" lastFinishedPulling="2026-02-16 13:21:26.462490376 +0000 UTC m=+1085.789204104" observedRunningTime="2026-02-16 13:21:27.556920134 +0000 UTC m=+1086.883633872" watchObservedRunningTime="2026-02-16 13:21:27.565813557 +0000 UTC m=+1086.892527285" Feb 16 13:21:27 crc kubenswrapper[4816]: I0216 13:21:27.596691 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-v9w6q" podStartSLOduration=4.333040515 podStartE2EDuration="31.59667148s" podCreationTimestamp="2026-02-16 13:20:56 +0000 UTC" firstStartedPulling="2026-02-16 13:20:57.879756949 +0000 UTC m=+1057.206470677" lastFinishedPulling="2026-02-16 13:21:25.143387914 +0000 UTC m=+1084.470101642" observedRunningTime="2026-02-16 13:21:27.591593121 +0000 UTC m=+1086.918306849" watchObservedRunningTime="2026-02-16 13:21:27.59667148 +0000 UTC m=+1086.923385208" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.566393 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qxznq" event={"ID":"e9f02af3-d5f6-4c8c-81b6-9889d79b0925","Type":"ContainerStarted","Data":"ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786"} Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.570958 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" event={"ID":"f8ae7652-9970-431b-9eb8-69b03f1ba522","Type":"ContainerStarted","Data":"111fb9c6576c67ac91c5280a8a6fc636e12d7f9277679d6b8a16966f95b005e9"} Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.571110 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.579531 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279","Type":"ContainerStarted","Data":"928d717d6d418e23ea08ee966cb3977d296a5c9fa2ddbce5816ef4e5e13e37d9"} Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.579601 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279","Type":"ContainerStarted","Data":"6e64aee290d85b9d87d4f4f3be6dbde686eecaeb8eb080e46016bd90348ba633"} Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.586224 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
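
The pod_startup_latency_tracker entries above carry enough timestamps to re-derive both reported durations: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally excludes the image-pull window (lastFinishedPulling minus firstStartedPulling). Checking the kube-state-metrics-0 numbers:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Timestamps copied from the kube-state-metrics-0 entry above.
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        parse := func(s string) time.Time {
            t, err := time.Parse(layout, s)
            if err != nil {
                panic(err)
            }
            return t
        }
        created := parse("2026-02-16 13:20:53 +0000 UTC")
        pullStart := parse("2026-02-16 13:20:55.50721863 +0000 UTC")
        pullEnd := parse("2026-02-16 13:21:26.462490376 +0000 UTC")
        running := parse("2026-02-16 13:21:27.565813557 +0000 UTC")

        e2e := running.Sub(created)         // 34.565813557s == podStartE2EDuration
        slo := e2e - pullEnd.Sub(pullStart) // 3.610541811s  == podStartSLOduration
        fmt.Println(e2e, slo)
    }
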
event={"ID":"7d49f257-3900-43a9-b4c2-353ceeeeea88","Type":"ContainerStarted","Data":"21fba53057aa6cf88d2e0405e7ed7ba15f4e8c3f5cb13e82b0cbe8e8ec11ac99"} Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.589203 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-qxznq" podStartSLOduration=27.897018033 podStartE2EDuration="30.589181661s" podCreationTimestamp="2026-02-16 13:20:58 +0000 UTC" firstStartedPulling="2026-02-16 13:21:25.057887007 +0000 UTC m=+1084.384600735" lastFinishedPulling="2026-02-16 13:21:27.750050645 +0000 UTC m=+1087.076764363" observedRunningTime="2026-02-16 13:21:28.579559878 +0000 UTC m=+1087.906273606" watchObservedRunningTime="2026-02-16 13:21:28.589181661 +0000 UTC m=+1087.915895389" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.621071 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" podStartSLOduration=27.914542828 podStartE2EDuration="29.621052183s" podCreationTimestamp="2026-02-16 13:20:59 +0000 UTC" firstStartedPulling="2026-02-16 13:21:24.757011529 +0000 UTC m=+1084.083725257" lastFinishedPulling="2026-02-16 13:21:26.463520884 +0000 UTC m=+1085.790234612" observedRunningTime="2026-02-16 13:21:28.605896748 +0000 UTC m=+1087.932610476" watchObservedRunningTime="2026-02-16 13:21:28.621052183 +0000 UTC m=+1087.947765911" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.640080 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=27.400338156 podStartE2EDuration="29.640065172s" podCreationTimestamp="2026-02-16 13:20:59 +0000 UTC" firstStartedPulling="2026-02-16 13:21:25.447740517 +0000 UTC m=+1084.774454245" lastFinishedPulling="2026-02-16 13:21:27.687467543 +0000 UTC m=+1087.014181261" observedRunningTime="2026-02-16 13:21:28.6242418 +0000 UTC m=+1087.950955548" watchObservedRunningTime="2026-02-16 13:21:28.640065172 +0000 UTC m=+1087.966778900" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.654280 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.190950745 podStartE2EDuration="33.654257121s" podCreationTimestamp="2026-02-16 13:20:55 +0000 UTC" firstStartedPulling="2026-02-16 13:20:58.276888829 +0000 UTC m=+1057.603602557" lastFinishedPulling="2026-02-16 13:21:27.740195205 +0000 UTC m=+1087.066908933" observedRunningTime="2026-02-16 13:21:28.643274691 +0000 UTC m=+1087.969988439" watchObservedRunningTime="2026-02-16 13:21:28.654257121 +0000 UTC m=+1087.980970849" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.941522 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-65qn9"] Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.986030 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-fbm7q"] Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.987723 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.992458 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 16 13:21:28 crc kubenswrapper[4816]: I0216 13:21:28.994735 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-fbm7q"] Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.054716 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.055061 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcq95\" (UniqueName: \"kubernetes.io/projected/5cde9788-671a-4384-9a93-47ce8dc91118-kube-api-access-pcq95\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.055125 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-config\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.055179 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-dns-svc\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.055229 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.156391 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcq95\" (UniqueName: \"kubernetes.io/projected/5cde9788-671a-4384-9a93-47ce8dc91118-kube-api-access-pcq95\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.156533 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-config\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.157551 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-config\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " 
pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.157624 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-dns-svc\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.158339 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-dns-svc\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.158477 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.160082 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.160159 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.160875 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.182491 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcq95\" (UniqueName: \"kubernetes.io/projected/5cde9788-671a-4384-9a93-47ce8dc91118-kube-api-access-pcq95\") pod \"dnsmasq-dns-8554648995-fbm7q\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.301873 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-65qn9" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.311360 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.373987 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-config\") pod \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.374093 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-dns-svc\") pod \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.374129 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdk6f\" (UniqueName: \"kubernetes.io/projected/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-kube-api-access-sdk6f\") pod \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\" (UID: \"e9c5c27a-8fd9-4218-bb63-2703f52b21fb\") " Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.374934 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-config" (OuterVolumeSpecName: "config") pod "e9c5c27a-8fd9-4218-bb63-2703f52b21fb" (UID: "e9c5c27a-8fd9-4218-bb63-2703f52b21fb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.375228 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e9c5c27a-8fd9-4218-bb63-2703f52b21fb" (UID: "e9c5c27a-8fd9-4218-bb63-2703f52b21fb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.377976 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-kube-api-access-sdk6f" (OuterVolumeSpecName: "kube-api-access-sdk6f") pod "e9c5c27a-8fd9-4218-bb63-2703f52b21fb" (UID: "e9c5c27a-8fd9-4218-bb63-2703f52b21fb"). InnerVolumeSpecName "kube-api-access-sdk6f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.477418 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.477719 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.477735 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdk6f\" (UniqueName: \"kubernetes.io/projected/e9c5c27a-8fd9-4218-bb63-2703f52b21fb-kube-api-access-sdk6f\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.628198 4816 generic.go:334] "Generic (PLEG): container finished" podID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerID="9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf" exitCode=0 Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.628306 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8cdb34b1-893f-4701-89b2-195db5c6c03b","Type":"ContainerDied","Data":"9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf"} Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.651304 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9eb39773-46a3-4f31-a95a-64a183dbe417","Type":"ContainerStarted","Data":"1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646"} Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.656288 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-65qn9" event={"ID":"e9c5c27a-8fd9-4218-bb63-2703f52b21fb","Type":"ContainerDied","Data":"6e7950cabbbbd74aa81ba40cae963e51dbf08f307185611a5840ea9037bc61f3"} Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.656536 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-65qn9" Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.771605 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-65qn9"] Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.794636 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-65qn9"] Feb 16 13:21:29 crc kubenswrapper[4816]: I0216 13:21:29.841210 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-fbm7q"] Feb 16 13:21:29 crc kubenswrapper[4816]: W0216 13:21:29.846867 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cde9788_671a_4384_9a93_47ce8dc91118.slice/crio-1e28c50ee0a029e17f1fcf0298aa63fc1d043aa6c1f5bd913d928f5f94b1ee54 WatchSource:0}: Error finding container 1e28c50ee0a029e17f1fcf0298aa63fc1d043aa6c1f5bd913d928f5f94b1ee54: Status 404 returned error can't find the container with id 1e28c50ee0a029e17f1fcf0298aa63fc1d043aa6c1f5bd913d928f5f94b1ee54 Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.094989 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.134987 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.512444 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.512540 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.551480 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.666513 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8cdb34b1-893f-4701-89b2-195db5c6c03b","Type":"ContainerStarted","Data":"e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8"} Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.668702 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfcee51-c740-477a-87d9-558fffc58686","Type":"ContainerStarted","Data":"be23562396e8deb7c2fcd78ad08cf2775995e40b6eb695892ea897a7e1bfb880"} Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.670288 4816 generic.go:334] "Generic (PLEG): container finished" podID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerID="63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1" exitCode=0 Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.670365 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a9428a1-a54a-4e85-b898-1eac97438ba8","Type":"ContainerDied","Data":"63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1"} Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.672443 4816 generic.go:334] "Generic (PLEG): container finished" podID="5cde9788-671a-4384-9a93-47ce8dc91118" containerID="4609389cfa9bb393272b9d967e2f9eb65e430d776f90a86f51236044ecbeebdb" exitCode=0 Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.672544 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-fbm7q" 
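
The probe transitions above (startup "unhealthy" then "started" for ovsdbserver-sb-0 and ovsdbserver-nb-0, with readiness turning "ready" a few seconds later) are the runtime side of probe specs like the one dumped for memcached at the top of this section: plain TCP connects against port 11211, liveness every 3s after a 3s delay and readiness every 5s after a 5s delay. Reconstructing those two probes from that spec:

    package probes

    import (
        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    // memcachedProbes reproduces the liveness and readiness probes from the
    // memcached container spec logged earlier in this section.
    func memcachedProbes() (liveness, readiness corev1.Probe) {
        tcp := &corev1.TCPSocketAction{Port: intstr.FromInt(11211)}
        liveness = corev1.Probe{
            ProbeHandler:        corev1.ProbeHandler{TCPSocket: tcp},
            InitialDelaySeconds: 3,
            TimeoutSeconds:      5,
            PeriodSeconds:       3,
            SuccessThreshold:    1,
            FailureThreshold:    3,
        }
        readiness = corev1.Probe{
            ProbeHandler:        corev1.ProbeHandler{TCPSocket: tcp},
            InitialDelaySeconds: 5,
            TimeoutSeconds:      5,
            PeriodSeconds:       5,
            SuccessThreshold:    1,
            FailureThreshold:    3,
        }
        return liveness, readiness
    }
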
event={"ID":"5cde9788-671a-4384-9a93-47ce8dc91118","Type":"ContainerDied","Data":"4609389cfa9bb393272b9d967e2f9eb65e430d776f90a86f51236044ecbeebdb"} Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.672594 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-fbm7q" event={"ID":"5cde9788-671a-4384-9a93-47ce8dc91118","Type":"ContainerStarted","Data":"1e28c50ee0a029e17f1fcf0298aa63fc1d043aa6c1f5bd913d928f5f94b1ee54"} Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.672788 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 16 13:21:30 crc kubenswrapper[4816]: I0216 13:21:30.688206 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=10.046317039 podStartE2EDuration="41.688186819s" podCreationTimestamp="2026-02-16 13:20:49 +0000 UTC" firstStartedPulling="2026-02-16 13:20:52.242686861 +0000 UTC m=+1051.569400589" lastFinishedPulling="2026-02-16 13:21:23.884556641 +0000 UTC m=+1083.211270369" observedRunningTime="2026-02-16 13:21:30.688167879 +0000 UTC m=+1090.014881627" watchObservedRunningTime="2026-02-16 13:21:30.688186819 +0000 UTC m=+1090.014900547" Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.409286 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9c5c27a-8fd9-4218-bb63-2703f52b21fb" path="/var/lib/kubelet/pods/e9c5c27a-8fd9-4218-bb63-2703f52b21fb/volumes" Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.603998 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.604054 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.680045 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a9428a1-a54a-4e85-b898-1eac97438ba8","Type":"ContainerStarted","Data":"9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29"} Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.682101 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-fbm7q" event={"ID":"5cde9788-671a-4384-9a93-47ce8dc91118","Type":"ContainerStarted","Data":"d79bc0394a605a37483c447e5ea49c129eecf0b1c5909faf7747297376eeee4e"} Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.709625 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.933926895 podStartE2EDuration="44.70960553s" podCreationTimestamp="2026-02-16 13:20:47 +0000 UTC" firstStartedPulling="2026-02-16 13:20:50.186037561 +0000 UTC m=+1049.512751289" lastFinishedPulling="2026-02-16 13:21:24.961716196 +0000 UTC m=+1084.288429924" observedRunningTime="2026-02-16 13:21:31.70156594 +0000 UTC m=+1091.028279688" watchObservedRunningTime="2026-02-16 13:21:31.70960553 +0000 UTC m=+1091.036319258" Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.724387 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Feb 16 13:21:31 crc kubenswrapper[4816]: I0216 13:21:31.725226 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-fbm7q" podStartSLOduration=3.725207717 podStartE2EDuration="3.725207717s" podCreationTimestamp="2026-02-16 13:21:28 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:21:31.721553036 +0000 UTC m=+1091.048266764" watchObservedRunningTime="2026-02-16 13:21:31.725207717 +0000 UTC m=+1091.051921445" Feb 16 13:21:32 crc kubenswrapper[4816]: I0216 13:21:32.688352 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:33 crc kubenswrapper[4816]: I0216 13:21:33.985442 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 16 13:21:34 crc kubenswrapper[4816]: I0216 13:21:34.810947 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.549815 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.745300 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.746528 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.750259 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.753495 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.754238 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-fpdll" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.754917 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.763237 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.878912 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-scripts\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.878961 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.879046 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.879079 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-config\") pod \"ovn-northd-0\" (UID: 
\"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.879116 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59k8q\" (UniqueName: \"kubernetes.io/projected/8b7ff418-3104-4d5e-880c-bc9de7258943-kube-api-access-59k8q\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.879151 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.879199 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.981264 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.981348 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.981421 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-scripts\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.981456 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.981510 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.981534 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-config\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.981576 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59k8q\" (UniqueName: 
\"kubernetes.io/projected/8b7ff418-3104-4d5e-880c-bc9de7258943-kube-api-access-59k8q\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.982633 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.982940 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-config\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.983015 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-scripts\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.986942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.986981 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:35 crc kubenswrapper[4816]: I0216 13:21:35.987123 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:36 crc kubenswrapper[4816]: I0216 13:21:36.002133 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59k8q\" (UniqueName: \"kubernetes.io/projected/8b7ff418-3104-4d5e-880c-bc9de7258943-kube-api-access-59k8q\") pod \"ovn-northd-0\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " pod="openstack/ovn-northd-0" Feb 16 13:21:36 crc kubenswrapper[4816]: I0216 13:21:36.067076 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Feb 16 13:21:36 crc kubenswrapper[4816]: I0216 13:21:36.530245 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 16 13:21:36 crc kubenswrapper[4816]: W0216 13:21:36.545836 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b7ff418_3104_4d5e_880c_bc9de7258943.slice/crio-a3ddce87225cdbd688fbac4ea5c16e69acab2028e37dc77d12a23977d0c656cb WatchSource:0}: Error finding container a3ddce87225cdbd688fbac4ea5c16e69acab2028e37dc77d12a23977d0c656cb: Status 404 returned error can't find the container with id a3ddce87225cdbd688fbac4ea5c16e69acab2028e37dc77d12a23977d0c656cb Feb 16 13:21:36 crc kubenswrapper[4816]: I0216 13:21:36.717741 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b7ff418-3104-4d5e-880c-bc9de7258943","Type":"ContainerStarted","Data":"a3ddce87225cdbd688fbac4ea5c16e69acab2028e37dc77d12a23977d0c656cb"} Feb 16 13:21:37 crc kubenswrapper[4816]: I0216 13:21:37.692559 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Feb 16 13:21:37 crc kubenswrapper[4816]: I0216 13:21:37.762624 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.313882 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.367275 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xcsds"] Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.367518 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerName="dnsmasq-dns" containerID="cri-o://111fb9c6576c67ac91c5280a8a6fc636e12d7f9277679d6b8a16966f95b005e9" gracePeriod=10 Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.390638 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.390698 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.481333 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.739809 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b7ff418-3104-4d5e-880c-bc9de7258943","Type":"ContainerStarted","Data":"3ab81f6efce6fb86362c42eda7876dd469e8113fb561b222ea56c2868f292aeb"} Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.741565 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb","Type":"ContainerStarted","Data":"d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6"} Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.742798 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.747309 4816 generic.go:334] "Generic (PLEG): container finished" podID="f8ae7652-9970-431b-9eb8-69b03f1ba522" 
containerID="111fb9c6576c67ac91c5280a8a6fc636e12d7f9277679d6b8a16966f95b005e9" exitCode=0 Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.747397 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" event={"ID":"f8ae7652-9970-431b-9eb8-69b03f1ba522","Type":"ContainerDied","Data":"111fb9c6576c67ac91c5280a8a6fc636e12d7f9277679d6b8a16966f95b005e9"} Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.749293 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerID="5ab245e75ae4f94ade8ab4cfddcda099b8ce7f43e1b4902b3fb5af79e51718b4" exitCode=0 Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.749348 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rfd9r" event={"ID":"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b","Type":"ContainerDied","Data":"5ab245e75ae4f94ade8ab4cfddcda099b8ce7f43e1b4902b3fb5af79e51718b4"} Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.762807 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.65532943 podStartE2EDuration="48.762786738s" podCreationTimestamp="2026-02-16 13:20:51 +0000 UTC" firstStartedPulling="2026-02-16 13:20:52.876339458 +0000 UTC m=+1052.203053186" lastFinishedPulling="2026-02-16 13:21:38.983796766 +0000 UTC m=+1098.310510494" observedRunningTime="2026-02-16 13:21:39.758549971 +0000 UTC m=+1099.085263699" watchObservedRunningTime="2026-02-16 13:21:39.762786738 +0000 UTC m=+1099.089500466" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.841366 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.878564 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.950497 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6fmq\" (UniqueName: \"kubernetes.io/projected/f8ae7652-9970-431b-9eb8-69b03f1ba522-kube-api-access-p6fmq\") pod \"f8ae7652-9970-431b-9eb8-69b03f1ba522\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.951042 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-config\") pod \"f8ae7652-9970-431b-9eb8-69b03f1ba522\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.951090 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-dns-svc\") pod \"f8ae7652-9970-431b-9eb8-69b03f1ba522\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.951142 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-ovsdbserver-sb\") pod \"f8ae7652-9970-431b-9eb8-69b03f1ba522\" (UID: \"f8ae7652-9970-431b-9eb8-69b03f1ba522\") " Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.958225 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/f8ae7652-9970-431b-9eb8-69b03f1ba522-kube-api-access-p6fmq" (OuterVolumeSpecName: "kube-api-access-p6fmq") pod "f8ae7652-9970-431b-9eb8-69b03f1ba522" (UID: "f8ae7652-9970-431b-9eb8-69b03f1ba522"). InnerVolumeSpecName "kube-api-access-p6fmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.993192 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f8ae7652-9970-431b-9eb8-69b03f1ba522" (UID: "f8ae7652-9970-431b-9eb8-69b03f1ba522"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:39 crc kubenswrapper[4816]: I0216 13:21:39.997235 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-config" (OuterVolumeSpecName: "config") pod "f8ae7652-9970-431b-9eb8-69b03f1ba522" (UID: "f8ae7652-9970-431b-9eb8-69b03f1ba522"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.009630 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f8ae7652-9970-431b-9eb8-69b03f1ba522" (UID: "f8ae7652-9970-431b-9eb8-69b03f1ba522"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.053435 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.053481 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.053491 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8ae7652-9970-431b-9eb8-69b03f1ba522-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.053502 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6fmq\" (UniqueName: \"kubernetes.io/projected/f8ae7652-9970-431b-9eb8-69b03f1ba522-kube-api-access-p6fmq\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.086822 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-z67ph"] Feb 16 13:21:40 crc kubenswrapper[4816]: E0216 13:21:40.087135 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerName="init" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.087146 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerName="init" Feb 16 13:21:40 crc kubenswrapper[4816]: E0216 13:21:40.087166 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerName="dnsmasq-dns" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.087171 4816 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerName="dnsmasq-dns" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.087297 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerName="dnsmasq-dns" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.087772 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.091064 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.094429 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-z67ph"] Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.256059 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e4fead9-b349-4ffa-8946-09a600bd3139-operator-scripts\") pod \"root-account-create-update-z67ph\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.256108 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnqxh\" (UniqueName: \"kubernetes.io/projected/1e4fead9-b349-4ffa-8946-09a600bd3139-kube-api-access-hnqxh\") pod \"root-account-create-update-z67ph\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.358029 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e4fead9-b349-4ffa-8946-09a600bd3139-operator-scripts\") pod \"root-account-create-update-z67ph\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.358105 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnqxh\" (UniqueName: \"kubernetes.io/projected/1e4fead9-b349-4ffa-8946-09a600bd3139-kube-api-access-hnqxh\") pod \"root-account-create-update-z67ph\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.358984 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e4fead9-b349-4ffa-8946-09a600bd3139-operator-scripts\") pod \"root-account-create-update-z67ph\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.379378 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnqxh\" (UniqueName: \"kubernetes.io/projected/1e4fead9-b349-4ffa-8946-09a600bd3139-kube-api-access-hnqxh\") pod \"root-account-create-update-z67ph\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.418872 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.757820 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b7ff418-3104-4d5e-880c-bc9de7258943","Type":"ContainerStarted","Data":"69ee0cac9e4f93da6f2382337f0f124d262804d3845fd42fa8d7a742bee8220b"} Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.759326 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.762859 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.762855 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" event={"ID":"f8ae7652-9970-431b-9eb8-69b03f1ba522","Type":"ContainerDied","Data":"f652c57e0dbcd42f27dd2cce7ac3b359cc6ca5f0e056864996a361dbcf359a71"} Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.763009 4816 scope.go:117] "RemoveContainer" containerID="111fb9c6576c67ac91c5280a8a6fc636e12d7f9277679d6b8a16966f95b005e9" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.767608 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rfd9r" event={"ID":"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b","Type":"ContainerStarted","Data":"7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c"} Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.767651 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rfd9r" event={"ID":"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b","Type":"ContainerStarted","Data":"8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744"} Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.768524 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.769360 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.786417 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.040637084 podStartE2EDuration="5.786396088s" podCreationTimestamp="2026-02-16 13:21:35 +0000 UTC" firstStartedPulling="2026-02-16 13:21:36.548725048 +0000 UTC m=+1095.875438776" lastFinishedPulling="2026-02-16 13:21:39.294484052 +0000 UTC m=+1098.621197780" observedRunningTime="2026-02-16 13:21:40.780702103 +0000 UTC m=+1100.107415831" watchObservedRunningTime="2026-02-16 13:21:40.786396088 +0000 UTC m=+1100.113109816" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.791487 4816 scope.go:117] "RemoveContainer" containerID="8aaafd076d6fd5857354bae1f19fde54138f82bc388b3d4be9613ecb6ee3e39e" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.804219 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-rfd9r" podStartSLOduration=4.390342663 podStartE2EDuration="44.804181866s" podCreationTimestamp="2026-02-16 13:20:56 +0000 UTC" firstStartedPulling="2026-02-16 13:20:58.458001262 +0000 UTC m=+1057.784714990" lastFinishedPulling="2026-02-16 13:21:38.871840445 +0000 UTC m=+1098.198554193" observedRunningTime="2026-02-16 13:21:40.802390116 +0000 UTC m=+1100.129103894" watchObservedRunningTime="2026-02-16 
13:21:40.804181866 +0000 UTC m=+1100.130895594" Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.826490 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xcsds"] Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.836332 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-xcsds"] Feb 16 13:21:40 crc kubenswrapper[4816]: W0216 13:21:40.841858 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e4fead9_b349_4ffa_8946_09a600bd3139.slice/crio-58d24370bc6bc06ca85d7f52d7e84bc286b0ae13077678d8cfaae194bcbbdb11 WatchSource:0}: Error finding container 58d24370bc6bc06ca85d7f52d7e84bc286b0ae13077678d8cfaae194bcbbdb11: Status 404 returned error can't find the container with id 58d24370bc6bc06ca85d7f52d7e84bc286b0ae13077678d8cfaae194bcbbdb11 Feb 16 13:21:40 crc kubenswrapper[4816]: I0216 13:21:40.842602 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-z67ph"] Feb 16 13:21:41 crc kubenswrapper[4816]: I0216 13:21:41.408599 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" path="/var/lib/kubelet/pods/f8ae7652-9970-431b-9eb8-69b03f1ba522/volumes" Feb 16 13:21:41 crc kubenswrapper[4816]: I0216 13:21:41.774685 4816 generic.go:334] "Generic (PLEG): container finished" podID="1e4fead9-b349-4ffa-8946-09a600bd3139" containerID="9401e9bc0e7d30596699b27b35b8c23305237b7533a1ac0a6c3b64a65f9f4905" exitCode=0 Feb 16 13:21:41 crc kubenswrapper[4816]: I0216 13:21:41.774768 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-z67ph" event={"ID":"1e4fead9-b349-4ffa-8946-09a600bd3139","Type":"ContainerDied","Data":"9401e9bc0e7d30596699b27b35b8c23305237b7533a1ac0a6c3b64a65f9f4905"} Feb 16 13:21:41 crc kubenswrapper[4816]: I0216 13:21:41.775002 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-z67ph" event={"ID":"1e4fead9-b349-4ffa-8946-09a600bd3139","Type":"ContainerStarted","Data":"58d24370bc6bc06ca85d7f52d7e84bc286b0ae13077678d8cfaae194bcbbdb11"} Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.075463 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-nnjgp"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.076841 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.090916 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-8fa6-account-create-update-t2hcp"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.102768 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nnjgp"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.103229 4816 util.go:30] "No sandbox for pod can be found. 
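The "Cleaned up orphaned pod volumes dir" entries above are kubelet housekeeping: once a removed pod has no mounted volumes left, its /var/lib/kubelet/pods/<UID>/volumes directory is deleted. A rough standalone sketch of that check; the real kubelet_volumes.go logic also verifies nothing is still mounted before removing anything:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // cleanupOrphanedPodDirs removes the volumes dir of any pod directory
    // whose UID is no longer in the active set, mirroring the housekeeping
    // entries above. Simplified: no mount-point checks are performed here.
    func cleanupOrphanedPodDirs(podsRoot string, active map[string]bool) error {
        entries, err := os.ReadDir(podsRoot)
        if err != nil {
            return err
        }
        for _, e := range entries {
            if !e.IsDir() || active[e.Name()] {
                continue
            }
            volumes := filepath.Join(podsRoot, e.Name(), "volumes")
            if _, err := os.Stat(volumes); err == nil {
                fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n",
                    e.Name(), volumes)
                if err := os.RemoveAll(volumes); err != nil {
                    return err
                }
            }
        }
        return nil
    }

    func main() {
        // Example invocation against the default kubelet root; in the real
        // kubelet the active UID set comes from the pod manager.
        _ = cleanupOrphanedPodDirs("/var/lib/kubelet/pods", map[string]bool{})
    }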
Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.106792 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.118702 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8fa6-account-create-update-t2hcp"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.188584 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca667270-ef70-4131-a442-3600cdb034c9-operator-scripts\") pod \"keystone-8fa6-account-create-update-t2hcp\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.188643 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d60f290e-fa31-45c1-a6ec-857a7ac94394-operator-scripts\") pod \"keystone-db-create-nnjgp\" (UID: \"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.188698 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg7zr\" (UniqueName: \"kubernetes.io/projected/ca667270-ef70-4131-a442-3600cdb034c9-kube-api-access-gg7zr\") pod \"keystone-8fa6-account-create-update-t2hcp\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.188730 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhxdb\" (UniqueName: \"kubernetes.io/projected/d60f290e-fa31-45c1-a6ec-857a7ac94394-kube-api-access-lhxdb\") pod \"keystone-db-create-nnjgp\" (UID: \"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.246899 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-lfw76"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.248007 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.272239 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6847-account-create-update-lb227"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.273779 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.278156 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.293692 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca667270-ef70-4131-a442-3600cdb034c9-operator-scripts\") pod \"keystone-8fa6-account-create-update-t2hcp\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.293736 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d60f290e-fa31-45c1-a6ec-857a7ac94394-operator-scripts\") pod \"keystone-db-create-nnjgp\" (UID: \"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.293816 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg7zr\" (UniqueName: \"kubernetes.io/projected/ca667270-ef70-4131-a442-3600cdb034c9-kube-api-access-gg7zr\") pod \"keystone-8fa6-account-create-update-t2hcp\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.293858 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhxdb\" (UniqueName: \"kubernetes.io/projected/d60f290e-fa31-45c1-a6ec-857a7ac94394-kube-api-access-lhxdb\") pod \"keystone-db-create-nnjgp\" (UID: \"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.295040 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca667270-ef70-4131-a442-3600cdb034c9-operator-scripts\") pod \"keystone-8fa6-account-create-update-t2hcp\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.295520 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d60f290e-fa31-45c1-a6ec-857a7ac94394-operator-scripts\") pod \"keystone-db-create-nnjgp\" (UID: \"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.302415 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lfw76"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.314875 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg7zr\" (UniqueName: \"kubernetes.io/projected/ca667270-ef70-4131-a442-3600cdb034c9-kube-api-access-gg7zr\") pod \"keystone-8fa6-account-create-update-t2hcp\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.315581 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhxdb\" (UniqueName: \"kubernetes.io/projected/d60f290e-fa31-45c1-a6ec-857a7ac94394-kube-api-access-lhxdb\") pod \"keystone-db-create-nnjgp\" (UID: 
\"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.316199 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6847-account-create-update-lb227"] Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.395712 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-operator-scripts\") pod \"placement-db-create-lfw76\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.395810 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4843e372-830c-4b62-a21b-6e9fe41d7973-operator-scripts\") pod \"placement-6847-account-create-update-lb227\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.395957 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bjzb\" (UniqueName: \"kubernetes.io/projected/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-kube-api-access-9bjzb\") pod \"placement-db-create-lfw76\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.396115 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z69d8\" (UniqueName: \"kubernetes.io/projected/4843e372-830c-4b62-a21b-6e9fe41d7973-kube-api-access-z69d8\") pod \"placement-6847-account-create-update-lb227\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.433095 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.445983 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.498719 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bjzb\" (UniqueName: \"kubernetes.io/projected/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-kube-api-access-9bjzb\") pod \"placement-db-create-lfw76\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.498820 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z69d8\" (UniqueName: \"kubernetes.io/projected/4843e372-830c-4b62-a21b-6e9fe41d7973-kube-api-access-z69d8\") pod \"placement-6847-account-create-update-lb227\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.498911 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-operator-scripts\") pod \"placement-db-create-lfw76\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.498937 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4843e372-830c-4b62-a21b-6e9fe41d7973-operator-scripts\") pod \"placement-6847-account-create-update-lb227\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.500005 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4843e372-830c-4b62-a21b-6e9fe41d7973-operator-scripts\") pod \"placement-6847-account-create-update-lb227\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.500487 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-operator-scripts\") pod \"placement-db-create-lfw76\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.520574 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z69d8\" (UniqueName: \"kubernetes.io/projected/4843e372-830c-4b62-a21b-6e9fe41d7973-kube-api-access-z69d8\") pod \"placement-6847-account-create-update-lb227\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.531532 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bjzb\" (UniqueName: \"kubernetes.io/projected/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-kube-api-access-9bjzb\") pod \"placement-db-create-lfw76\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.566256 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-lfw76" Feb 16 13:21:42 crc kubenswrapper[4816]: I0216 13:21:42.594223 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.661014 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8fa6-account-create-update-t2hcp"] Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.822011 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.866808 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nnjgp"] Feb 16 13:21:43 crc kubenswrapper[4816]: W0216 13:21:43.869781 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd60f290e_fa31_45c1_a6ec_857a7ac94394.slice/crio-666ff902be368431d2615c6ebeaf1ac1c916d8309fdab6fa81edbd47077b60d5 WatchSource:0}: Error finding container 666ff902be368431d2615c6ebeaf1ac1c916d8309fdab6fa81edbd47077b60d5: Status 404 returned error can't find the container with id 666ff902be368431d2615c6ebeaf1ac1c916d8309fdab6fa81edbd47077b60d5 Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.888846 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnqxh\" (UniqueName: \"kubernetes.io/projected/1e4fead9-b349-4ffa-8946-09a600bd3139-kube-api-access-hnqxh\") pod \"1e4fead9-b349-4ffa-8946-09a600bd3139\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.890057 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e4fead9-b349-4ffa-8946-09a600bd3139-operator-scripts\") pod \"1e4fead9-b349-4ffa-8946-09a600bd3139\" (UID: \"1e4fead9-b349-4ffa-8946-09a600bd3139\") " Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.891159 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e4fead9-b349-4ffa-8946-09a600bd3139-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1e4fead9-b349-4ffa-8946-09a600bd3139" (UID: "1e4fead9-b349-4ffa-8946-09a600bd3139"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.895302 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e4fead9-b349-4ffa-8946-09a600bd3139-kube-api-access-hnqxh" (OuterVolumeSpecName: "kube-api-access-hnqxh") pod "1e4fead9-b349-4ffa-8946-09a600bd3139" (UID: "1e4fead9-b349-4ffa-8946-09a600bd3139"). InnerVolumeSpecName "kube-api-access-hnqxh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.914600 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lfw76"] Feb 16 13:21:43 crc kubenswrapper[4816]: W0216 13:21:43.921286 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbdb6e3b2_f49c_4d96_81e7_3fc6c360425f.slice/crio-fa8aff59b32e23562004f70a3a468f8edc576fb91780318047332e9eb0a7142b WatchSource:0}: Error finding container fa8aff59b32e23562004f70a3a468f8edc576fb91780318047332e9eb0a7142b: Status 404 returned error can't find the container with id fa8aff59b32e23562004f70a3a468f8edc576fb91780318047332e9eb0a7142b Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.964504 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nnjgp" event={"ID":"d60f290e-fa31-45c1-a6ec-857a7ac94394","Type":"ContainerStarted","Data":"666ff902be368431d2615c6ebeaf1ac1c916d8309fdab6fa81edbd47077b60d5"} Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.966041 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lfw76" event={"ID":"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f","Type":"ContainerStarted","Data":"fa8aff59b32e23562004f70a3a468f8edc576fb91780318047332e9eb0a7142b"} Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.969373 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8fa6-account-create-update-t2hcp" event={"ID":"ca667270-ef70-4131-a442-3600cdb034c9","Type":"ContainerStarted","Data":"581ca028c0ca32ee280e69406751f61f447512e6984f0d2dbcc8b22666a5614b"} Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.969413 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8fa6-account-create-update-t2hcp" event={"ID":"ca667270-ef70-4131-a442-3600cdb034c9","Type":"ContainerStarted","Data":"86120ca565906175aec8a19e8cb690bea6805355ce33d1bc0589287ca2f9bbdd"} Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.974823 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-z67ph" event={"ID":"1e4fead9-b349-4ffa-8946-09a600bd3139","Type":"ContainerDied","Data":"58d24370bc6bc06ca85d7f52d7e84bc286b0ae13077678d8cfaae194bcbbdb11"} Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.974856 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58d24370bc6bc06ca85d7f52d7e84bc286b0ae13077678d8cfaae194bcbbdb11" Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.974916 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-z67ph" Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.993438 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnqxh\" (UniqueName: \"kubernetes.io/projected/1e4fead9-b349-4ffa-8946-09a600bd3139-kube-api-access-hnqxh\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:43 crc kubenswrapper[4816]: I0216 13:21:43.993465 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e4fead9-b349-4ffa-8946-09a600bd3139-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.003904 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-8fa6-account-create-update-t2hcp" podStartSLOduration=2.003885712 podStartE2EDuration="2.003885712s" podCreationTimestamp="2026-02-16 13:21:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:21:43.992534972 +0000 UTC m=+1103.319248740" watchObservedRunningTime="2026-02-16 13:21:44.003885712 +0000 UTC m=+1103.330599430" Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.009498 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6847-account-create-update-lb227"] Feb 16 13:21:44 crc kubenswrapper[4816]: W0216 13:21:44.010202 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4843e372_830c_4b62_a21b_6e9fe41d7973.slice/crio-9ecffd1ed0d67999e9738d446eeba6c96a74801312cdde1f5efc8ddaf0fdb19c WatchSource:0}: Error finding container 9ecffd1ed0d67999e9738d446eeba6c96a74801312cdde1f5efc8ddaf0fdb19c: Status 404 returned error can't find the container with id 9ecffd1ed0d67999e9738d446eeba6c96a74801312cdde1f5efc8ddaf0fdb19c Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.811446 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6bc7876d45-xcsds" podUID="f8ae7652-9970-431b-9eb8-69b03f1ba522" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.107:5353: i/o timeout" Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.982784 4816 generic.go:334] "Generic (PLEG): container finished" podID="d60f290e-fa31-45c1-a6ec-857a7ac94394" containerID="a69d3b00643ccd3897caf83c0b51bd02143f4dc6c41e62039441a9238ec6fb07" exitCode=0 Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.982831 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nnjgp" event={"ID":"d60f290e-fa31-45c1-a6ec-857a7ac94394","Type":"ContainerDied","Data":"a69d3b00643ccd3897caf83c0b51bd02143f4dc6c41e62039441a9238ec6fb07"} Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.984282 4816 generic.go:334] "Generic (PLEG): container finished" podID="bdb6e3b2-f49c-4d96-81e7-3fc6c360425f" containerID="bcea712aa54e81de52474aa92b168797ba2d5c183cbb8b20e136d9c44d98a7ca" exitCode=0 Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.984342 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lfw76" event={"ID":"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f","Type":"ContainerDied","Data":"bcea712aa54e81de52474aa92b168797ba2d5c183cbb8b20e136d9c44d98a7ca"} Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.986410 4816 generic.go:334] "Generic (PLEG): container finished" podID="4843e372-830c-4b62-a21b-6e9fe41d7973" 
containerID="a52319a3a18c673b7ada74018de88a1b6d2bfa72422d9eec639505e94d2a6b88" exitCode=0 Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.986589 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6847-account-create-update-lb227" event={"ID":"4843e372-830c-4b62-a21b-6e9fe41d7973","Type":"ContainerDied","Data":"a52319a3a18c673b7ada74018de88a1b6d2bfa72422d9eec639505e94d2a6b88"} Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.986812 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6847-account-create-update-lb227" event={"ID":"4843e372-830c-4b62-a21b-6e9fe41d7973","Type":"ContainerStarted","Data":"9ecffd1ed0d67999e9738d446eeba6c96a74801312cdde1f5efc8ddaf0fdb19c"} Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.988383 4816 generic.go:334] "Generic (PLEG): container finished" podID="ca667270-ef70-4131-a442-3600cdb034c9" containerID="581ca028c0ca32ee280e69406751f61f447512e6984f0d2dbcc8b22666a5614b" exitCode=0 Feb 16 13:21:44 crc kubenswrapper[4816]: I0216 13:21:44.988416 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8fa6-account-create-update-t2hcp" event={"ID":"ca667270-ef70-4131-a442-3600cdb034c9","Type":"ContainerDied","Data":"581ca028c0ca32ee280e69406751f61f447512e6984f0d2dbcc8b22666a5614b"} Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.408362 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lfw76" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.430895 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bjzb\" (UniqueName: \"kubernetes.io/projected/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-kube-api-access-9bjzb\") pod \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.431095 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-operator-scripts\") pod \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\" (UID: \"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.434545 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bdb6e3b2-f49c-4d96-81e7-3fc6c360425f" (UID: "bdb6e3b2-f49c-4d96-81e7-3fc6c360425f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.439794 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-kube-api-access-9bjzb" (OuterVolumeSpecName: "kube-api-access-9bjzb") pod "bdb6e3b2-f49c-4d96-81e7-3fc6c360425f" (UID: "bdb6e3b2-f49c-4d96-81e7-3fc6c360425f"). InnerVolumeSpecName "kube-api-access-9bjzb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.532927 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.532966 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bjzb\" (UniqueName: \"kubernetes.io/projected/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f-kube-api-access-9bjzb\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.544284 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.550136 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.559071 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.634204 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d60f290e-fa31-45c1-a6ec-857a7ac94394-operator-scripts\") pod \"d60f290e-fa31-45c1-a6ec-857a7ac94394\" (UID: \"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.634537 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4843e372-830c-4b62-a21b-6e9fe41d7973-operator-scripts\") pod \"4843e372-830c-4b62-a21b-6e9fe41d7973\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.634685 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gg7zr\" (UniqueName: \"kubernetes.io/projected/ca667270-ef70-4131-a442-3600cdb034c9-kube-api-access-gg7zr\") pod \"ca667270-ef70-4131-a442-3600cdb034c9\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.634791 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhxdb\" (UniqueName: \"kubernetes.io/projected/d60f290e-fa31-45c1-a6ec-857a7ac94394-kube-api-access-lhxdb\") pod \"d60f290e-fa31-45c1-a6ec-857a7ac94394\" (UID: \"d60f290e-fa31-45c1-a6ec-857a7ac94394\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.634877 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca667270-ef70-4131-a442-3600cdb034c9-operator-scripts\") pod \"ca667270-ef70-4131-a442-3600cdb034c9\" (UID: \"ca667270-ef70-4131-a442-3600cdb034c9\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.634991 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z69d8\" (UniqueName: \"kubernetes.io/projected/4843e372-830c-4b62-a21b-6e9fe41d7973-kube-api-access-z69d8\") pod \"4843e372-830c-4b62-a21b-6e9fe41d7973\" (UID: \"4843e372-830c-4b62-a21b-6e9fe41d7973\") " Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.635095 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/d60f290e-fa31-45c1-a6ec-857a7ac94394-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d60f290e-fa31-45c1-a6ec-857a7ac94394" (UID: "d60f290e-fa31-45c1-a6ec-857a7ac94394"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.635149 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4843e372-830c-4b62-a21b-6e9fe41d7973-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4843e372-830c-4b62-a21b-6e9fe41d7973" (UID: "4843e372-830c-4b62-a21b-6e9fe41d7973"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.635536 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d60f290e-fa31-45c1-a6ec-857a7ac94394-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.635643 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4843e372-830c-4b62-a21b-6e9fe41d7973-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.635645 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca667270-ef70-4131-a442-3600cdb034c9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ca667270-ef70-4131-a442-3600cdb034c9" (UID: "ca667270-ef70-4131-a442-3600cdb034c9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.637930 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca667270-ef70-4131-a442-3600cdb034c9-kube-api-access-gg7zr" (OuterVolumeSpecName: "kube-api-access-gg7zr") pod "ca667270-ef70-4131-a442-3600cdb034c9" (UID: "ca667270-ef70-4131-a442-3600cdb034c9"). InnerVolumeSpecName "kube-api-access-gg7zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.637986 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d60f290e-fa31-45c1-a6ec-857a7ac94394-kube-api-access-lhxdb" (OuterVolumeSpecName: "kube-api-access-lhxdb") pod "d60f290e-fa31-45c1-a6ec-857a7ac94394" (UID: "d60f290e-fa31-45c1-a6ec-857a7ac94394"). InnerVolumeSpecName "kube-api-access-lhxdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.638424 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4843e372-830c-4b62-a21b-6e9fe41d7973-kube-api-access-z69d8" (OuterVolumeSpecName: "kube-api-access-z69d8") pod "4843e372-830c-4b62-a21b-6e9fe41d7973" (UID: "4843e372-830c-4b62-a21b-6e9fe41d7973"). InnerVolumeSpecName "kube-api-access-z69d8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.737498 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gg7zr\" (UniqueName: \"kubernetes.io/projected/ca667270-ef70-4131-a442-3600cdb034c9-kube-api-access-gg7zr\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.737537 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhxdb\" (UniqueName: \"kubernetes.io/projected/d60f290e-fa31-45c1-a6ec-857a7ac94394-kube-api-access-lhxdb\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.737547 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca667270-ef70-4131-a442-3600cdb034c9-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.737558 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z69d8\" (UniqueName: \"kubernetes.io/projected/4843e372-830c-4b62-a21b-6e9fe41d7973-kube-api-access-z69d8\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:46 crc kubenswrapper[4816]: I0216 13:21:46.811154 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.004377 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nnjgp" event={"ID":"d60f290e-fa31-45c1-a6ec-857a7ac94394","Type":"ContainerDied","Data":"666ff902be368431d2615c6ebeaf1ac1c916d8309fdab6fa81edbd47077b60d5"} Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.004412 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="666ff902be368431d2615c6ebeaf1ac1c916d8309fdab6fa81edbd47077b60d5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.004480 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnjgp" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.015390 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lfw76" event={"ID":"bdb6e3b2-f49c-4d96-81e7-3fc6c360425f","Type":"ContainerDied","Data":"fa8aff59b32e23562004f70a3a468f8edc576fb91780318047332e9eb0a7142b"} Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.015435 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa8aff59b32e23562004f70a3a468f8edc576fb91780318047332e9eb0a7142b" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.015409 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lfw76" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.018816 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6847-account-create-update-lb227" event={"ID":"4843e372-830c-4b62-a21b-6e9fe41d7973","Type":"ContainerDied","Data":"9ecffd1ed0d67999e9738d446eeba6c96a74801312cdde1f5efc8ddaf0fdb19c"} Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.018854 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ecffd1ed0d67999e9738d446eeba6c96a74801312cdde1f5efc8ddaf0fdb19c" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.018894 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6847-account-create-update-lb227" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.022286 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8fa6-account-create-update-t2hcp" event={"ID":"ca667270-ef70-4131-a442-3600cdb034c9","Type":"ContainerDied","Data":"86120ca565906175aec8a19e8cb690bea6805355ce33d1bc0589287ca2f9bbdd"} Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.022480 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86120ca565906175aec8a19e8cb690bea6805355ce33d1bc0589287ca2f9bbdd" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.022592 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-t2hcp" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.029291 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-jwglz"] Feb 16 13:21:47 crc kubenswrapper[4816]: E0216 13:21:47.029695 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e4fead9-b349-4ffa-8946-09a600bd3139" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.029718 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e4fead9-b349-4ffa-8946-09a600bd3139" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: E0216 13:21:47.029741 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d60f290e-fa31-45c1-a6ec-857a7ac94394" containerName="mariadb-database-create" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.029753 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d60f290e-fa31-45c1-a6ec-857a7ac94394" containerName="mariadb-database-create" Feb 16 13:21:47 crc kubenswrapper[4816]: E0216 13:21:47.029769 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdb6e3b2-f49c-4d96-81e7-3fc6c360425f" containerName="mariadb-database-create" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.029777 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdb6e3b2-f49c-4d96-81e7-3fc6c360425f" containerName="mariadb-database-create" Feb 16 13:21:47 crc kubenswrapper[4816]: E0216 13:21:47.029812 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca667270-ef70-4131-a442-3600cdb034c9" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.029821 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca667270-ef70-4131-a442-3600cdb034c9" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: E0216 13:21:47.029842 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4843e372-830c-4b62-a21b-6e9fe41d7973" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.029848 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4843e372-830c-4b62-a21b-6e9fe41d7973" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.030061 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e4fead9-b349-4ffa-8946-09a600bd3139" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.030080 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdb6e3b2-f49c-4d96-81e7-3fc6c360425f" containerName="mariadb-database-create" Feb 16 13:21:47 crc kubenswrapper[4816]: 
I0216 13:21:47.030091 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4843e372-830c-4b62-a21b-6e9fe41d7973" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.030102 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d60f290e-fa31-45c1-a6ec-857a7ac94394" containerName="mariadb-database-create" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.030114 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca667270-ef70-4131-a442-3600cdb034c9" containerName="mariadb-account-create-update" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.030740 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.035851 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-jwglz"] Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.041102 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xkd5\" (UniqueName: \"kubernetes.io/projected/da21e294-2b36-4937-80ec-15c429fe6be8-kube-api-access-6xkd5\") pod \"glance-db-create-jwglz\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.041177 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da21e294-2b36-4937-80ec-15c429fe6be8-operator-scripts\") pod \"glance-db-create-jwglz\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.113272 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-1657-account-create-update-9lhq5"] Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.114473 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.119510 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.122622 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1657-account-create-update-9lhq5"] Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.142679 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-operator-scripts\") pod \"glance-1657-account-create-update-9lhq5\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.142747 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xkd5\" (UniqueName: \"kubernetes.io/projected/da21e294-2b36-4937-80ec-15c429fe6be8-kube-api-access-6xkd5\") pod \"glance-db-create-jwglz\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.142789 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsbhz\" (UniqueName: \"kubernetes.io/projected/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-kube-api-access-dsbhz\") pod \"glance-1657-account-create-update-9lhq5\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.142847 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da21e294-2b36-4937-80ec-15c429fe6be8-operator-scripts\") pod \"glance-db-create-jwglz\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.143902 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da21e294-2b36-4937-80ec-15c429fe6be8-operator-scripts\") pod \"glance-db-create-jwglz\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.163500 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xkd5\" (UniqueName: \"kubernetes.io/projected/da21e294-2b36-4937-80ec-15c429fe6be8-kube-api-access-6xkd5\") pod \"glance-db-create-jwglz\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.243528 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-operator-scripts\") pod \"glance-1657-account-create-update-9lhq5\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.243682 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsbhz\" (UniqueName: \"kubernetes.io/projected/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-kube-api-access-dsbhz\") pod 
\"glance-1657-account-create-update-9lhq5\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.244265 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-operator-scripts\") pod \"glance-1657-account-create-update-9lhq5\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.280524 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsbhz\" (UniqueName: \"kubernetes.io/projected/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-kube-api-access-dsbhz\") pod \"glance-1657-account-create-update-9lhq5\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.361866 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jwglz" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.436579 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:47 crc kubenswrapper[4816]: I0216 13:21:47.855527 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-jwglz"] Feb 16 13:21:47 crc kubenswrapper[4816]: W0216 13:21:47.863251 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda21e294_2b36_4937_80ec_15c429fe6be8.slice/crio-94200609609e807a047776486ac75338d372e3785a6ee85e5a0f7e1e15a2a7f1 WatchSource:0}: Error finding container 94200609609e807a047776486ac75338d372e3785a6ee85e5a0f7e1e15a2a7f1: Status 404 returned error can't find the container with id 94200609609e807a047776486ac75338d372e3785a6ee85e5a0f7e1e15a2a7f1 Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.005784 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1657-account-create-update-9lhq5"] Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.015791 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-z67ph"] Feb 16 13:21:48 crc kubenswrapper[4816]: W0216 13:21:48.018569 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf23d2bf5_14b2_4681_944f_cdb0aedf86c1.slice/crio-0f9519147e0b28538ef589381e93f95a6fd76e26119198a70e18b62b61f1100a WatchSource:0}: Error finding container 0f9519147e0b28538ef589381e93f95a6fd76e26119198a70e18b62b61f1100a: Status 404 returned error can't find the container with id 0f9519147e0b28538ef589381e93f95a6fd76e26119198a70e18b62b61f1100a Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.023620 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-z67ph"] Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.030018 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jwglz" event={"ID":"da21e294-2b36-4937-80ec-15c429fe6be8","Type":"ContainerStarted","Data":"94200609609e807a047776486ac75338d372e3785a6ee85e5a0f7e1e15a2a7f1"} Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.031216 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-1657-account-create-update-9lhq5" event={"ID":"f23d2bf5-14b2-4681-944f-cdb0aedf86c1","Type":"ContainerStarted","Data":"0f9519147e0b28538ef589381e93f95a6fd76e26119198a70e18b62b61f1100a"} Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.125821 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-lrf8c"] Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.126769 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.129045 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.142136 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-lrf8c"] Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.158185 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f169a08-6015-443c-a9d9-5e3f55e5ef58-operator-scripts\") pod \"root-account-create-update-lrf8c\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.158265 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rkm2\" (UniqueName: \"kubernetes.io/projected/9f169a08-6015-443c-a9d9-5e3f55e5ef58-kube-api-access-4rkm2\") pod \"root-account-create-update-lrf8c\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.259465 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rkm2\" (UniqueName: \"kubernetes.io/projected/9f169a08-6015-443c-a9d9-5e3f55e5ef58-kube-api-access-4rkm2\") pod \"root-account-create-update-lrf8c\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.259610 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f169a08-6015-443c-a9d9-5e3f55e5ef58-operator-scripts\") pod \"root-account-create-update-lrf8c\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.260495 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f169a08-6015-443c-a9d9-5e3f55e5ef58-operator-scripts\") pod \"root-account-create-update-lrf8c\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.277921 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rkm2\" (UniqueName: \"kubernetes.io/projected/9f169a08-6015-443c-a9d9-5e3f55e5ef58-kube-api-access-4rkm2\") pod \"root-account-create-update-lrf8c\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.440860 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:48 crc kubenswrapper[4816]: I0216 13:21:48.859859 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-lrf8c"] Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.038500 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-lrf8c" event={"ID":"9f169a08-6015-443c-a9d9-5e3f55e5ef58","Type":"ContainerStarted","Data":"80db716e3daf7dccdb960d0101521240725a4a2754c21a25fd3fae9a938eb708"} Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.038534 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-lrf8c" event={"ID":"9f169a08-6015-443c-a9d9-5e3f55e5ef58","Type":"ContainerStarted","Data":"e39f589d8d2ecb31287502558d54a7c2c7deca73b1b63d632990fad6372fc96d"} Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.040690 4816 generic.go:334] "Generic (PLEG): container finished" podID="da21e294-2b36-4937-80ec-15c429fe6be8" containerID="7ca1dd77f7cc1f4b3821489cc3c35cd369aec47735907be42c9c819ae55d1672" exitCode=0 Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.040756 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jwglz" event={"ID":"da21e294-2b36-4937-80ec-15c429fe6be8","Type":"ContainerDied","Data":"7ca1dd77f7cc1f4b3821489cc3c35cd369aec47735907be42c9c819ae55d1672"} Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.042525 4816 generic.go:334] "Generic (PLEG): container finished" podID="f23d2bf5-14b2-4681-944f-cdb0aedf86c1" containerID="1307433013f8fc69cb64509a7a0a11818f2c756bea9194bef6578614e0d17a73" exitCode=0 Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.042549 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1657-account-create-update-9lhq5" event={"ID":"f23d2bf5-14b2-4681-944f-cdb0aedf86c1","Type":"ContainerDied","Data":"1307433013f8fc69cb64509a7a0a11818f2c756bea9194bef6578614e0d17a73"} Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.055860 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-lrf8c" podStartSLOduration=1.05584101 podStartE2EDuration="1.05584101s" podCreationTimestamp="2026-02-16 13:21:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:21:49.054908985 +0000 UTC m=+1108.381622713" watchObservedRunningTime="2026-02-16 13:21:49.05584101 +0000 UTC m=+1108.382554738" Feb 16 13:21:49 crc kubenswrapper[4816]: I0216 13:21:49.411332 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e4fead9-b349-4ffa-8946-09a600bd3139" path="/var/lib/kubelet/pods/1e4fead9-b349-4ffa-8946-09a600bd3139/volumes" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.054327 4816 generic.go:334] "Generic (PLEG): container finished" podID="9f169a08-6015-443c-a9d9-5e3f55e5ef58" containerID="80db716e3daf7dccdb960d0101521240725a4a2754c21a25fd3fae9a938eb708" exitCode=0 Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.054450 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-lrf8c" event={"ID":"9f169a08-6015-443c-a9d9-5e3f55e5ef58","Type":"ContainerDied","Data":"80db716e3daf7dccdb960d0101521240725a4a2754c21a25fd3fae9a938eb708"} Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.460421 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.467889 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jwglz" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.591113 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-operator-scripts\") pod \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.591167 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xkd5\" (UniqueName: \"kubernetes.io/projected/da21e294-2b36-4937-80ec-15c429fe6be8-kube-api-access-6xkd5\") pod \"da21e294-2b36-4937-80ec-15c429fe6be8\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.591195 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da21e294-2b36-4937-80ec-15c429fe6be8-operator-scripts\") pod \"da21e294-2b36-4937-80ec-15c429fe6be8\" (UID: \"da21e294-2b36-4937-80ec-15c429fe6be8\") " Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.591217 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsbhz\" (UniqueName: \"kubernetes.io/projected/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-kube-api-access-dsbhz\") pod \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\" (UID: \"f23d2bf5-14b2-4681-944f-cdb0aedf86c1\") " Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.591685 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f23d2bf5-14b2-4681-944f-cdb0aedf86c1" (UID: "f23d2bf5-14b2-4681-944f-cdb0aedf86c1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.591775 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da21e294-2b36-4937-80ec-15c429fe6be8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "da21e294-2b36-4937-80ec-15c429fe6be8" (UID: "da21e294-2b36-4937-80ec-15c429fe6be8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.600817 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da21e294-2b36-4937-80ec-15c429fe6be8-kube-api-access-6xkd5" (OuterVolumeSpecName: "kube-api-access-6xkd5") pod "da21e294-2b36-4937-80ec-15c429fe6be8" (UID: "da21e294-2b36-4937-80ec-15c429fe6be8"). InnerVolumeSpecName "kube-api-access-6xkd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.604630 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-kube-api-access-dsbhz" (OuterVolumeSpecName: "kube-api-access-dsbhz") pod "f23d2bf5-14b2-4681-944f-cdb0aedf86c1" (UID: "f23d2bf5-14b2-4681-944f-cdb0aedf86c1"). InnerVolumeSpecName "kube-api-access-dsbhz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.692565 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.692602 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xkd5\" (UniqueName: \"kubernetes.io/projected/da21e294-2b36-4937-80ec-15c429fe6be8-kube-api-access-6xkd5\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.692616 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da21e294-2b36-4937-80ec-15c429fe6be8-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:50 crc kubenswrapper[4816]: I0216 13:21:50.692624 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsbhz\" (UniqueName: \"kubernetes.io/projected/f23d2bf5-14b2-4681-944f-cdb0aedf86c1-kube-api-access-dsbhz\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.063900 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jwglz" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.063910 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jwglz" event={"ID":"da21e294-2b36-4937-80ec-15c429fe6be8","Type":"ContainerDied","Data":"94200609609e807a047776486ac75338d372e3785a6ee85e5a0f7e1e15a2a7f1"} Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.064260 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94200609609e807a047776486ac75338d372e3785a6ee85e5a0f7e1e15a2a7f1" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.066633 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1657-account-create-update-9lhq5" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.066607 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1657-account-create-update-9lhq5" event={"ID":"f23d2bf5-14b2-4681-944f-cdb0aedf86c1","Type":"ContainerDied","Data":"0f9519147e0b28538ef589381e93f95a6fd76e26119198a70e18b62b61f1100a"} Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.066767 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f9519147e0b28538ef589381e93f95a6fd76e26119198a70e18b62b61f1100a" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.290455 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.402788 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f169a08-6015-443c-a9d9-5e3f55e5ef58-operator-scripts\") pod \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.402871 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rkm2\" (UniqueName: \"kubernetes.io/projected/9f169a08-6015-443c-a9d9-5e3f55e5ef58-kube-api-access-4rkm2\") pod \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\" (UID: \"9f169a08-6015-443c-a9d9-5e3f55e5ef58\") " Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.403507 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f169a08-6015-443c-a9d9-5e3f55e5ef58-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9f169a08-6015-443c-a9d9-5e3f55e5ef58" (UID: "9f169a08-6015-443c-a9d9-5e3f55e5ef58"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.406944 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f169a08-6015-443c-a9d9-5e3f55e5ef58-kube-api-access-4rkm2" (OuterVolumeSpecName: "kube-api-access-4rkm2") pod "9f169a08-6015-443c-a9d9-5e3f55e5ef58" (UID: "9f169a08-6015-443c-a9d9-5e3f55e5ef58"). InnerVolumeSpecName "kube-api-access-4rkm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.504169 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rkm2\" (UniqueName: \"kubernetes.io/projected/9f169a08-6015-443c-a9d9-5e3f55e5ef58-kube-api-access-4rkm2\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:51 crc kubenswrapper[4816]: I0216 13:21:51.504196 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f169a08-6015-443c-a9d9-5e3f55e5ef58-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.074267 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-lrf8c" event={"ID":"9f169a08-6015-443c-a9d9-5e3f55e5ef58","Type":"ContainerDied","Data":"e39f589d8d2ecb31287502558d54a7c2c7deca73b1b63d632990fad6372fc96d"} Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.074594 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e39f589d8d2ecb31287502558d54a7c2c7deca73b1b63d632990fad6372fc96d" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.074294 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-lrf8c" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.272971 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-dncx2"] Feb 16 13:21:52 crc kubenswrapper[4816]: E0216 13:21:52.273371 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f169a08-6015-443c-a9d9-5e3f55e5ef58" containerName="mariadb-account-create-update" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.273393 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f169a08-6015-443c-a9d9-5e3f55e5ef58" containerName="mariadb-account-create-update" Feb 16 13:21:52 crc kubenswrapper[4816]: E0216 13:21:52.273427 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f23d2bf5-14b2-4681-944f-cdb0aedf86c1" containerName="mariadb-account-create-update" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.273434 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f23d2bf5-14b2-4681-944f-cdb0aedf86c1" containerName="mariadb-account-create-update" Feb 16 13:21:52 crc kubenswrapper[4816]: E0216 13:21:52.273443 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da21e294-2b36-4937-80ec-15c429fe6be8" containerName="mariadb-database-create" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.273449 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="da21e294-2b36-4937-80ec-15c429fe6be8" containerName="mariadb-database-create" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.273593 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f169a08-6015-443c-a9d9-5e3f55e5ef58" containerName="mariadb-account-create-update" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.273605 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="da21e294-2b36-4937-80ec-15c429fe6be8" containerName="mariadb-database-create" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.273614 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f23d2bf5-14b2-4681-944f-cdb0aedf86c1" containerName="mariadb-account-create-update" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.274186 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.276508 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.276533 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-8xfml" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.283983 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-dncx2"] Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.416335 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-combined-ca-bundle\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.416419 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-db-sync-config-data\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.416464 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-config-data\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.416490 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mvxj\" (UniqueName: \"kubernetes.io/projected/b97c413a-6a78-4ff5-87e8-31639467ae1d-kube-api-access-4mvxj\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.518168 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-db-sync-config-data\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.518228 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-config-data\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.518253 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mvxj\" (UniqueName: \"kubernetes.io/projected/b97c413a-6a78-4ff5-87e8-31639467ae1d-kube-api-access-4mvxj\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.518338 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-combined-ca-bundle\") pod 
\"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.533015 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-db-sync-config-data\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.533064 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-combined-ca-bundle\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.533079 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-config-data\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.539905 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mvxj\" (UniqueName: \"kubernetes.io/projected/b97c413a-6a78-4ff5-87e8-31639467ae1d-kube-api-access-4mvxj\") pod \"glance-db-sync-dncx2\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:52 crc kubenswrapper[4816]: I0216 13:21:52.589465 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dncx2" Feb 16 13:21:53 crc kubenswrapper[4816]: I0216 13:21:53.143528 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-dncx2"] Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.098616 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dncx2" event={"ID":"b97c413a-6a78-4ff5-87e8-31639467ae1d","Type":"ContainerStarted","Data":"5a961660532ff7f002d1885cf2b7dbcadb5416e9f8798ddfa5ae39985e5c1d90"} Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.160240 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-66gzc"] Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.161431 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.190316 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-66gzc"] Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.349529 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.349573 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.349628 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-config\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.349647 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.349786 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7t45\" (UniqueName: \"kubernetes.io/projected/ff43b918-77f6-4472-ab31-01aebee3adaa-kube-api-access-r7t45\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.451051 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.451133 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-config\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.451153 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.451239 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7t45\" 
(UniqueName: \"kubernetes.io/projected/ff43b918-77f6-4472-ab31-01aebee3adaa-kube-api-access-r7t45\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.451284 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.452794 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.453020 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.453530 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.453630 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-config\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.493704 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7t45\" (UniqueName: \"kubernetes.io/projected/ff43b918-77f6-4472-ab31-01aebee3adaa-kube-api-access-r7t45\") pod \"dnsmasq-dns-b8fbc5445-66gzc\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:54 crc kubenswrapper[4816]: I0216 13:21:54.791057 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.260917 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-66gzc"] Feb 16 13:21:55 crc kubenswrapper[4816]: W0216 13:21:55.274809 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff43b918_77f6_4472_ab31_01aebee3adaa.slice/crio-2e760c30f77b60b5a0b3854afe5383e4a9b29ebd03439bbe148ed46c3ebc0240 WatchSource:0}: Error finding container 2e760c30f77b60b5a0b3854afe5383e4a9b29ebd03439bbe148ed46c3ebc0240: Status 404 returned error can't find the container with id 2e760c30f77b60b5a0b3854afe5383e4a9b29ebd03439bbe148ed46c3ebc0240 Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.365111 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.370144 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.374287 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.374498 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-cmqgv" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.374649 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.374831 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.380114 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.568369 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809ff1b2-f365-4513-89a1-aed781f4b4aa-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.568753 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-cache\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.568784 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.568803 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.568869 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-7tw2z\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-kube-api-access-7tw2z\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.568890 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-lock\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.670412 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tw2z\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-kube-api-access-7tw2z\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.670479 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-lock\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.670540 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809ff1b2-f365-4513-89a1-aed781f4b4aa-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.670595 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-cache\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.670625 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.670671 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.671078 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: E0216 13:21:55.673117 4816 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 16 13:21:55 crc kubenswrapper[4816]: E0216 13:21:55.673148 4816 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 16 13:21:55 crc kubenswrapper[4816]: E0216 13:21:55.673198 4816 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift podName:809ff1b2-f365-4513-89a1-aed781f4b4aa nodeName:}" failed. No retries permitted until 2026-02-16 13:21:56.173176134 +0000 UTC m=+1115.499889862 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift") pod "swift-storage-0" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa") : configmap "swift-ring-files" not found Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.673267 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-cache\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.673430 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-lock\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.692578 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809ff1b2-f365-4513-89a1-aed781f4b4aa-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.695447 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tw2z\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-kube-api-access-7tw2z\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.729129 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.801897 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-mnq5r"] Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.803560 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.808023 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.808239 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.810883 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.818448 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mnq5r"] Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.980321 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-swiftconf\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.980381 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-combined-ca-bundle\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.980435 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8p99\" (UniqueName: \"kubernetes.io/projected/3892f567-eaba-40b3-ab11-f49a067ec298-kube-api-access-l8p99\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.980470 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-ring-data-devices\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.980639 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-dispersionconf\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.980683 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3892f567-eaba-40b3-ab11-f49a067ec298-etc-swift\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:55 crc kubenswrapper[4816]: I0216 13:21:55.980753 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-scripts\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 
13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.084165 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-scripts\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.084270 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-swiftconf\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.084306 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-combined-ca-bundle\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.084355 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8p99\" (UniqueName: \"kubernetes.io/projected/3892f567-eaba-40b3-ab11-f49a067ec298-kube-api-access-l8p99\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.084386 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-ring-data-devices\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.084477 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-dispersionconf\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.084502 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3892f567-eaba-40b3-ab11-f49a067ec298-etc-swift\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.085249 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3892f567-eaba-40b3-ab11-f49a067ec298-etc-swift\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.086278 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-ring-data-devices\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.086813 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-scripts\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.089997 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-dispersionconf\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.090942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-swiftconf\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.101466 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8p99\" (UniqueName: \"kubernetes.io/projected/3892f567-eaba-40b3-ab11-f49a067ec298-kube-api-access-l8p99\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.106981 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-combined-ca-bundle\") pod \"swift-ring-rebalance-mnq5r\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.116358 4816 generic.go:334] "Generic (PLEG): container finished" podID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerID="0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad" exitCode=0 Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.116580 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" event={"ID":"ff43b918-77f6-4472-ab31-01aebee3adaa","Type":"ContainerDied","Data":"0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad"} Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.116704 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" event={"ID":"ff43b918-77f6-4472-ab31-01aebee3adaa","Type":"ContainerStarted","Data":"2e760c30f77b60b5a0b3854afe5383e4a9b29ebd03439bbe148ed46c3ebc0240"} Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.140096 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.169521 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.185948 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:56 crc kubenswrapper[4816]: E0216 13:21:56.186171 4816 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 16 13:21:56 crc kubenswrapper[4816]: E0216 13:21:56.186197 4816 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 16 13:21:56 crc kubenswrapper[4816]: E0216 13:21:56.186252 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift podName:809ff1b2-f365-4513-89a1-aed781f4b4aa nodeName:}" failed. No retries permitted until 2026-02-16 13:21:57.186233624 +0000 UTC m=+1116.512947362 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift") pod "swift-storage-0" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa") : configmap "swift-ring-files" not found Feb 16 13:21:56 crc kubenswrapper[4816]: I0216 13:21:56.648963 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mnq5r"] Feb 16 13:21:57 crc kubenswrapper[4816]: I0216 13:21:57.159713 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mnq5r" event={"ID":"3892f567-eaba-40b3-ab11-f49a067ec298","Type":"ContainerStarted","Data":"b282c49fdb5c9d80244bd08204b08512534519a4a0fcbf10ecfeb2e80baee91e"} Feb 16 13:21:57 crc kubenswrapper[4816]: I0216 13:21:57.175477 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" event={"ID":"ff43b918-77f6-4472-ab31-01aebee3adaa","Type":"ContainerStarted","Data":"b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8"} Feb 16 13:21:57 crc kubenswrapper[4816]: I0216 13:21:57.176798 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:21:57 crc kubenswrapper[4816]: I0216 13:21:57.213942 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:57 crc kubenswrapper[4816]: E0216 13:21:57.214161 4816 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 16 13:21:57 crc kubenswrapper[4816]: E0216 13:21:57.214192 4816 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 16 13:21:57 crc kubenswrapper[4816]: E0216 13:21:57.214261 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift podName:809ff1b2-f365-4513-89a1-aed781f4b4aa 
nodeName:}" failed. No retries permitted until 2026-02-16 13:21:59.214237695 +0000 UTC m=+1118.540951423 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift") pod "swift-storage-0" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa") : configmap "swift-ring-files" not found Feb 16 13:21:57 crc kubenswrapper[4816]: I0216 13:21:57.215198 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" podStartSLOduration=3.215175701 podStartE2EDuration="3.215175701s" podCreationTimestamp="2026-02-16 13:21:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:21:57.21294648 +0000 UTC m=+1116.539660208" watchObservedRunningTime="2026-02-16 13:21:57.215175701 +0000 UTC m=+1116.541889429" Feb 16 13:21:57 crc kubenswrapper[4816]: I0216 13:21:57.228112 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-v9w6q" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" probeResult="failure" output=< Feb 16 13:21:57 crc kubenswrapper[4816]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 16 13:21:57 crc kubenswrapper[4816]: > Feb 16 13:21:59 crc kubenswrapper[4816]: I0216 13:21:59.248488 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:21:59 crc kubenswrapper[4816]: E0216 13:21:59.248765 4816 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 16 13:21:59 crc kubenswrapper[4816]: E0216 13:21:59.249350 4816 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 16 13:21:59 crc kubenswrapper[4816]: E0216 13:21:59.249460 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift podName:809ff1b2-f365-4513-89a1-aed781f4b4aa nodeName:}" failed. No retries permitted until 2026-02-16 13:22:03.249428678 +0000 UTC m=+1122.576142486 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift") pod "swift-storage-0" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa") : configmap "swift-ring-files" not found Feb 16 13:22:01 crc kubenswrapper[4816]: I0216 13:22:01.211927 4816 generic.go:334] "Generic (PLEG): container finished" podID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerID="1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646" exitCode=0 Feb 16 13:22:01 crc kubenswrapper[4816]: I0216 13:22:01.211999 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9eb39773-46a3-4f31-a95a-64a183dbe417","Type":"ContainerDied","Data":"1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646"} Feb 16 13:22:02 crc kubenswrapper[4816]: I0216 13:22:02.040118 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-v9w6q" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" probeResult="failure" output=< Feb 16 13:22:02 crc kubenswrapper[4816]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 16 13:22:02 crc kubenswrapper[4816]: > Feb 16 13:22:02 crc kubenswrapper[4816]: I0216 13:22:02.223196 4816 generic.go:334] "Generic (PLEG): container finished" podID="ecfcee51-c740-477a-87d9-558fffc58686" containerID="be23562396e8deb7c2fcd78ad08cf2775995e40b6eb695892ea897a7e1bfb880" exitCode=0 Feb 16 13:22:02 crc kubenswrapper[4816]: I0216 13:22:02.223246 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfcee51-c740-477a-87d9-558fffc58686","Type":"ContainerDied","Data":"be23562396e8deb7c2fcd78ad08cf2775995e40b6eb695892ea897a7e1bfb880"} Feb 16 13:22:03 crc kubenswrapper[4816]: I0216 13:22:03.338560 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:22:03 crc kubenswrapper[4816]: E0216 13:22:03.338762 4816 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 16 13:22:03 crc kubenswrapper[4816]: E0216 13:22:03.339244 4816 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 16 13:22:03 crc kubenswrapper[4816]: E0216 13:22:03.339307 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift podName:809ff1b2-f365-4513-89a1-aed781f4b4aa nodeName:}" failed. No retries permitted until 2026-02-16 13:22:11.339290077 +0000 UTC m=+1130.666003805 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift") pod "swift-storage-0" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa") : configmap "swift-ring-files" not found Feb 16 13:22:04 crc kubenswrapper[4816]: I0216 13:22:04.792990 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:22:04 crc kubenswrapper[4816]: I0216 13:22:04.853179 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-fbm7q"] Feb 16 13:22:04 crc kubenswrapper[4816]: I0216 13:22:04.853406 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-fbm7q" podUID="5cde9788-671a-4384-9a93-47ce8dc91118" containerName="dnsmasq-dns" containerID="cri-o://d79bc0394a605a37483c447e5ea49c129eecf0b1c5909faf7747297376eeee4e" gracePeriod=10 Feb 16 13:22:05 crc kubenswrapper[4816]: I0216 13:22:05.275337 4816 generic.go:334] "Generic (PLEG): container finished" podID="5cde9788-671a-4384-9a93-47ce8dc91118" containerID="d79bc0394a605a37483c447e5ea49c129eecf0b1c5909faf7747297376eeee4e" exitCode=0 Feb 16 13:22:05 crc kubenswrapper[4816]: I0216 13:22:05.275390 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-fbm7q" event={"ID":"5cde9788-671a-4384-9a93-47ce8dc91118","Type":"ContainerDied","Data":"d79bc0394a605a37483c447e5ea49c129eecf0b1c5909faf7747297376eeee4e"} Feb 16 13:22:07 crc kubenswrapper[4816]: I0216 13:22:07.037249 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-v9w6q" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" probeResult="failure" output=< Feb 16 13:22:07 crc kubenswrapper[4816]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 16 13:22:07 crc kubenswrapper[4816]: > Feb 16 13:22:07 crc kubenswrapper[4816]: I0216 13:22:07.942337 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.064018 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcq95\" (UniqueName: \"kubernetes.io/projected/5cde9788-671a-4384-9a93-47ce8dc91118-kube-api-access-pcq95\") pod \"5cde9788-671a-4384-9a93-47ce8dc91118\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.064065 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-sb\") pod \"5cde9788-671a-4384-9a93-47ce8dc91118\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.064114 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-nb\") pod \"5cde9788-671a-4384-9a93-47ce8dc91118\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.064133 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-config\") pod \"5cde9788-671a-4384-9a93-47ce8dc91118\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.064188 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-dns-svc\") pod \"5cde9788-671a-4384-9a93-47ce8dc91118\" (UID: \"5cde9788-671a-4384-9a93-47ce8dc91118\") " Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.068879 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cde9788-671a-4384-9a93-47ce8dc91118-kube-api-access-pcq95" (OuterVolumeSpecName: "kube-api-access-pcq95") pod "5cde9788-671a-4384-9a93-47ce8dc91118" (UID: "5cde9788-671a-4384-9a93-47ce8dc91118"). InnerVolumeSpecName "kube-api-access-pcq95". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.104594 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5cde9788-671a-4384-9a93-47ce8dc91118" (UID: "5cde9788-671a-4384-9a93-47ce8dc91118"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.105031 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-config" (OuterVolumeSpecName: "config") pod "5cde9788-671a-4384-9a93-47ce8dc91118" (UID: "5cde9788-671a-4384-9a93-47ce8dc91118"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.106687 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5cde9788-671a-4384-9a93-47ce8dc91118" (UID: "5cde9788-671a-4384-9a93-47ce8dc91118"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.118458 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5cde9788-671a-4384-9a93-47ce8dc91118" (UID: "5cde9788-671a-4384-9a93-47ce8dc91118"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.166789 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.166828 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcq95\" (UniqueName: \"kubernetes.io/projected/5cde9788-671a-4384-9a93-47ce8dc91118-kube-api-access-pcq95\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.166843 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.166855 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.166866 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cde9788-671a-4384-9a93-47ce8dc91118-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.307976 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-fbm7q" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.307954 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-fbm7q" event={"ID":"5cde9788-671a-4384-9a93-47ce8dc91118","Type":"ContainerDied","Data":"1e28c50ee0a029e17f1fcf0298aa63fc1d043aa6c1f5bd913d928f5f94b1ee54"} Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.308117 4816 scope.go:117] "RemoveContainer" containerID="d79bc0394a605a37483c447e5ea49c129eecf0b1c5909faf7747297376eeee4e" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.309906 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mnq5r" event={"ID":"3892f567-eaba-40b3-ab11-f49a067ec298","Type":"ContainerStarted","Data":"c6f647e6f8e63892a1f0767746c765816e5f687f2317d30098382a81bbb1331b"} Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.328344 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9eb39773-46a3-4f31-a95a-64a183dbe417","Type":"ContainerStarted","Data":"663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb"} Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.328563 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.328710 4816 scope.go:117] "RemoveContainer" containerID="4609389cfa9bb393272b9d967e2f9eb65e430d776f90a86f51236044ecbeebdb" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.330547 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfcee51-c740-477a-87d9-558fffc58686","Type":"ContainerStarted","Data":"2fdeaad8597fbc86132995a2af976c1b8f4746137b8a80f5965aad1ee988bc45"} Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.330781 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.336841 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-mnq5r" podStartSLOduration=2.467595345 podStartE2EDuration="13.336823577s" podCreationTimestamp="2026-02-16 13:21:55 +0000 UTC" firstStartedPulling="2026-02-16 13:21:56.662496438 +0000 UTC m=+1115.989210166" lastFinishedPulling="2026-02-16 13:22:07.53172467 +0000 UTC m=+1126.858438398" observedRunningTime="2026-02-16 13:22:08.335089629 +0000 UTC m=+1127.661803357" watchObservedRunningTime="2026-02-16 13:22:08.336823577 +0000 UTC m=+1127.663537295" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.358810 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371954.495983 podStartE2EDuration="1m22.358792298s" podCreationTimestamp="2026-02-16 13:20:46 +0000 UTC" firstStartedPulling="2026-02-16 13:20:49.820727841 +0000 UTC m=+1049.147441569" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:08.355083165 +0000 UTC m=+1127.681796893" watchObservedRunningTime="2026-02-16 13:22:08.358792298 +0000 UTC m=+1127.685506026" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.384198 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=43.486762706 podStartE2EDuration="1m22.384171942s" podCreationTimestamp="2026-02-16 13:20:46 +0000 UTC" 
firstStartedPulling="2026-02-16 13:20:49.199157054 +0000 UTC m=+1048.525870782" lastFinishedPulling="2026-02-16 13:21:28.0965663 +0000 UTC m=+1087.423280018" observedRunningTime="2026-02-16 13:22:08.379348589 +0000 UTC m=+1127.706062317" watchObservedRunningTime="2026-02-16 13:22:08.384171942 +0000 UTC m=+1127.710885670" Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.395972 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-fbm7q"] Feb 16 13:22:08 crc kubenswrapper[4816]: I0216 13:22:08.402968 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-fbm7q"] Feb 16 13:22:09 crc kubenswrapper[4816]: I0216 13:22:09.342671 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dncx2" event={"ID":"b97c413a-6a78-4ff5-87e8-31639467ae1d","Type":"ContainerStarted","Data":"c0d25ba920e734873c0067159aea21f12858190b0e863e919da687bb7caef5c0"} Feb 16 13:22:09 crc kubenswrapper[4816]: I0216 13:22:09.364695 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-dncx2" podStartSLOduration=2.764005233 podStartE2EDuration="17.364673784s" podCreationTimestamp="2026-02-16 13:21:52 +0000 UTC" firstStartedPulling="2026-02-16 13:21:53.135069898 +0000 UTC m=+1112.461783626" lastFinishedPulling="2026-02-16 13:22:07.735738439 +0000 UTC m=+1127.062452177" observedRunningTime="2026-02-16 13:22:09.36308424 +0000 UTC m=+1128.689797978" watchObservedRunningTime="2026-02-16 13:22:09.364673784 +0000 UTC m=+1128.691387522" Feb 16 13:22:09 crc kubenswrapper[4816]: I0216 13:22:09.407434 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cde9788-671a-4384-9a93-47ce8dc91118" path="/var/lib/kubelet/pods/5cde9788-671a-4384-9a93-47ce8dc91118/volumes" Feb 16 13:22:11 crc kubenswrapper[4816]: I0216 13:22:11.358367 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:22:11 crc kubenswrapper[4816]: E0216 13:22:11.358642 4816 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 16 13:22:11 crc kubenswrapper[4816]: E0216 13:22:11.358806 4816 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 16 13:22:11 crc kubenswrapper[4816]: E0216 13:22:11.358867 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift podName:809ff1b2-f365-4513-89a1-aed781f4b4aa nodeName:}" failed. No retries permitted until 2026-02-16 13:22:27.358849055 +0000 UTC m=+1146.685562783 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift") pod "swift-storage-0" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa") : configmap "swift-ring-files" not found Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.052511 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-v9w6q" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" probeResult="failure" output=< Feb 16 13:22:12 crc kubenswrapper[4816]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 16 13:22:12 crc kubenswrapper[4816]: > Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.100746 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.103425 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rfd9r" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.797224 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-v9w6q-config-cn455"] Feb 16 13:22:12 crc kubenswrapper[4816]: E0216 13:22:12.797669 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cde9788-671a-4384-9a93-47ce8dc91118" containerName="dnsmasq-dns" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.797687 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cde9788-671a-4384-9a93-47ce8dc91118" containerName="dnsmasq-dns" Feb 16 13:22:12 crc kubenswrapper[4816]: E0216 13:22:12.797705 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cde9788-671a-4384-9a93-47ce8dc91118" containerName="init" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.797713 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cde9788-671a-4384-9a93-47ce8dc91118" containerName="init" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.797925 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cde9788-671a-4384-9a93-47ce8dc91118" containerName="dnsmasq-dns" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.798804 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.801228 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.805800 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v9w6q-config-cn455"] Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.840112 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-additional-scripts\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.840167 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run-ovn\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.840211 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmz9d\" (UniqueName: \"kubernetes.io/projected/fddd8577-e4e5-4562-bced-16c681fdf42b-kube-api-access-gmz9d\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.840270 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-scripts\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.840301 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-log-ovn\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.840346 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.941281 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-additional-scripts\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.941349 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run-ovn\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.941389 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmz9d\" (UniqueName: \"kubernetes.io/projected/fddd8577-e4e5-4562-bced-16c681fdf42b-kube-api-access-gmz9d\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.941445 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-scripts\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.941473 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-log-ovn\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.941523 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.942028 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.942036 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run-ovn\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.942080 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-log-ovn\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.942445 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-additional-scripts\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.944580 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-scripts\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:12 crc kubenswrapper[4816]: I0216 13:22:12.973045 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmz9d\" (UniqueName: \"kubernetes.io/projected/fddd8577-e4e5-4562-bced-16c681fdf42b-kube-api-access-gmz9d\") pod \"ovn-controller-v9w6q-config-cn455\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:13 crc kubenswrapper[4816]: I0216 13:22:13.118517 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:13 crc kubenswrapper[4816]: I0216 13:22:13.714546 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v9w6q-config-cn455"] Feb 16 13:22:14 crc kubenswrapper[4816]: I0216 13:22:14.382521 4816 generic.go:334] "Generic (PLEG): container finished" podID="fddd8577-e4e5-4562-bced-16c681fdf42b" containerID="8c2d4de596683493b52e68cd5bdff78061eb50a65a2f9b18774d741c4b454147" exitCode=0 Feb 16 13:22:14 crc kubenswrapper[4816]: I0216 13:22:14.382704 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q-config-cn455" event={"ID":"fddd8577-e4e5-4562-bced-16c681fdf42b","Type":"ContainerDied","Data":"8c2d4de596683493b52e68cd5bdff78061eb50a65a2f9b18774d741c4b454147"} Feb 16 13:22:14 crc kubenswrapper[4816]: I0216 13:22:14.382866 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q-config-cn455" event={"ID":"fddd8577-e4e5-4562-bced-16c681fdf42b","Type":"ContainerStarted","Data":"098c49ba1aa9cb6d5feb344f5a641e0d259bd124c39435842715f9c9d2d2a0f5"} Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.392529 4816 generic.go:334] "Generic (PLEG): container finished" podID="b97c413a-6a78-4ff5-87e8-31639467ae1d" containerID="c0d25ba920e734873c0067159aea21f12858190b0e863e919da687bb7caef5c0" exitCode=0 Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.392624 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dncx2" event={"ID":"b97c413a-6a78-4ff5-87e8-31639467ae1d","Type":"ContainerDied","Data":"c0d25ba920e734873c0067159aea21f12858190b0e863e919da687bb7caef5c0"} Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.764215 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.886312 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run-ovn\") pod \"fddd8577-e4e5-4562-bced-16c681fdf42b\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.886371 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-additional-scripts\") pod \"fddd8577-e4e5-4562-bced-16c681fdf42b\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.886405 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run\") pod \"fddd8577-e4e5-4562-bced-16c681fdf42b\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.886457 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmz9d\" (UniqueName: \"kubernetes.io/projected/fddd8577-e4e5-4562-bced-16c681fdf42b-kube-api-access-gmz9d\") pod \"fddd8577-e4e5-4562-bced-16c681fdf42b\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.886499 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-log-ovn\") pod \"fddd8577-e4e5-4562-bced-16c681fdf42b\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.886599 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-scripts\") pod \"fddd8577-e4e5-4562-bced-16c681fdf42b\" (UID: \"fddd8577-e4e5-4562-bced-16c681fdf42b\") " Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.887093 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run" (OuterVolumeSpecName: "var-run") pod "fddd8577-e4e5-4562-bced-16c681fdf42b" (UID: "fddd8577-e4e5-4562-bced-16c681fdf42b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.887156 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "fddd8577-e4e5-4562-bced-16c681fdf42b" (UID: "fddd8577-e4e5-4562-bced-16c681fdf42b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.887919 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "fddd8577-e4e5-4562-bced-16c681fdf42b" (UID: "fddd8577-e4e5-4562-bced-16c681fdf42b"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.888189 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-scripts" (OuterVolumeSpecName: "scripts") pod "fddd8577-e4e5-4562-bced-16c681fdf42b" (UID: "fddd8577-e4e5-4562-bced-16c681fdf42b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.888221 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "fddd8577-e4e5-4562-bced-16c681fdf42b" (UID: "fddd8577-e4e5-4562-bced-16c681fdf42b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.893424 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fddd8577-e4e5-4562-bced-16c681fdf42b-kube-api-access-gmz9d" (OuterVolumeSpecName: "kube-api-access-gmz9d") pod "fddd8577-e4e5-4562-bced-16c681fdf42b" (UID: "fddd8577-e4e5-4562-bced-16c681fdf42b"). InnerVolumeSpecName "kube-api-access-gmz9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.988711 4816 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.989044 4816 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.989061 4816 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.989070 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmz9d\" (UniqueName: \"kubernetes.io/projected/fddd8577-e4e5-4562-bced-16c681fdf42b-kube-api-access-gmz9d\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.989078 4816 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fddd8577-e4e5-4562-bced-16c681fdf42b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:15 crc kubenswrapper[4816]: I0216 13:22:15.989090 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fddd8577-e4e5-4562-bced-16c681fdf42b-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.408804 4816 generic.go:334] "Generic (PLEG): container finished" podID="3892f567-eaba-40b3-ab11-f49a067ec298" containerID="c6f647e6f8e63892a1f0767746c765816e5f687f2317d30098382a81bbb1331b" exitCode=0 Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.408881 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mnq5r" event={"ID":"3892f567-eaba-40b3-ab11-f49a067ec298","Type":"ContainerDied","Data":"c6f647e6f8e63892a1f0767746c765816e5f687f2317d30098382a81bbb1331b"} Feb 16 13:22:16 crc 
kubenswrapper[4816]: I0216 13:22:16.413956 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-cn455" Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.418559 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q-config-cn455" event={"ID":"fddd8577-e4e5-4562-bced-16c681fdf42b","Type":"ContainerDied","Data":"098c49ba1aa9cb6d5feb344f5a641e0d259bd124c39435842715f9c9d2d2a0f5"} Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.418623 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="098c49ba1aa9cb6d5feb344f5a641e0d259bd124c39435842715f9c9d2d2a0f5" Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.858521 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dncx2" Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.890628 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-v9w6q-config-cn455"] Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.908828 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-v9w6q-config-cn455"] Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.923431 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-combined-ca-bundle\") pod \"b97c413a-6a78-4ff5-87e8-31639467ae1d\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.923511 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-config-data\") pod \"b97c413a-6a78-4ff5-87e8-31639467ae1d\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.923567 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mvxj\" (UniqueName: \"kubernetes.io/projected/b97c413a-6a78-4ff5-87e8-31639467ae1d-kube-api-access-4mvxj\") pod \"b97c413a-6a78-4ff5-87e8-31639467ae1d\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.923696 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-db-sync-config-data\") pod \"b97c413a-6a78-4ff5-87e8-31639467ae1d\" (UID: \"b97c413a-6a78-4ff5-87e8-31639467ae1d\") " Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.936959 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b97c413a-6a78-4ff5-87e8-31639467ae1d-kube-api-access-4mvxj" (OuterVolumeSpecName: "kube-api-access-4mvxj") pod "b97c413a-6a78-4ff5-87e8-31639467ae1d" (UID: "b97c413a-6a78-4ff5-87e8-31639467ae1d"). InnerVolumeSpecName "kube-api-access-4mvxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.937734 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b97c413a-6a78-4ff5-87e8-31639467ae1d" (UID: "b97c413a-6a78-4ff5-87e8-31639467ae1d"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.980938 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-config-data" (OuterVolumeSpecName: "config-data") pod "b97c413a-6a78-4ff5-87e8-31639467ae1d" (UID: "b97c413a-6a78-4ff5-87e8-31639467ae1d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:16 crc kubenswrapper[4816]: I0216 13:22:16.996060 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b97c413a-6a78-4ff5-87e8-31639467ae1d" (UID: "b97c413a-6a78-4ff5-87e8-31639467ae1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.008965 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-v9w6q-config-hs65x"] Feb 16 13:22:17 crc kubenswrapper[4816]: E0216 13:22:17.009315 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fddd8577-e4e5-4562-bced-16c681fdf42b" containerName="ovn-config" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.009334 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fddd8577-e4e5-4562-bced-16c681fdf42b" containerName="ovn-config" Feb 16 13:22:17 crc kubenswrapper[4816]: E0216 13:22:17.009354 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b97c413a-6a78-4ff5-87e8-31639467ae1d" containerName="glance-db-sync" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.009360 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b97c413a-6a78-4ff5-87e8-31639467ae1d" containerName="glance-db-sync" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.009576 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fddd8577-e4e5-4562-bced-16c681fdf42b" containerName="ovn-config" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.009602 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b97c413a-6a78-4ff5-87e8-31639467ae1d" containerName="glance-db-sync" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.010155 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.014830 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.025170 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mvxj\" (UniqueName: \"kubernetes.io/projected/b97c413a-6a78-4ff5-87e8-31639467ae1d-kube-api-access-4mvxj\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.025220 4816 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.025233 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.025245 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97c413a-6a78-4ff5-87e8-31639467ae1d-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.041475 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v9w6q-config-hs65x"] Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.049691 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-v9w6q" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.128119 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-scripts\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.128185 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.128234 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-log-ovn\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.128274 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-additional-scripts\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.128305 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ggkw\" (UniqueName: 
\"kubernetes.io/projected/13dd3829-afd7-421b-8caf-6f789f71fc25-kube-api-access-9ggkw\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.128536 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run-ovn\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.230605 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-scripts\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.230949 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.231006 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-log-ovn\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.231050 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-additional-scripts\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.231083 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ggkw\" (UniqueName: \"kubernetes.io/projected/13dd3829-afd7-421b-8caf-6f789f71fc25-kube-api-access-9ggkw\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.231120 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run-ovn\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.231418 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run-ovn\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.232909 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" 
(UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-log-ovn\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.233129 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.233420 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-additional-scripts\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.234789 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-scripts\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.250679 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ggkw\" (UniqueName: \"kubernetes.io/projected/13dd3829-afd7-421b-8caf-6f789f71fc25-kube-api-access-9ggkw\") pod \"ovn-controller-v9w6q-config-hs65x\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.409795 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fddd8577-e4e5-4562-bced-16c681fdf42b" path="/var/lib/kubelet/pods/fddd8577-e4e5-4562-bced-16c681fdf42b/volumes" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.425469 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.441266 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dncx2" Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.441410 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dncx2" event={"ID":"b97c413a-6a78-4ff5-87e8-31639467ae1d","Type":"ContainerDied","Data":"5a961660532ff7f002d1885cf2b7dbcadb5416e9f8798ddfa5ae39985e5c1d90"} Feb 16 13:22:17 crc kubenswrapper[4816]: I0216 13:22:17.441462 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a961660532ff7f002d1885cf2b7dbcadb5416e9f8798ddfa5ae39985e5c1d90" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.099801 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.172916 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2qvgd"] Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.174672 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.231820 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.239518 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2qvgd"] Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.254712 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v9w6q-config-hs65x"] Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.304935 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxl4l\" (UniqueName: \"kubernetes.io/projected/90566558-9f0d-4487-a86c-4ef20464421b-kube-api-access-hxl4l\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.305304 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.305443 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.305504 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-dns-svc\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.305569 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-config\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.406468 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-dispersionconf\") pod \"3892f567-eaba-40b3-ab11-f49a067ec298\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.406882 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-ring-data-devices\") pod \"3892f567-eaba-40b3-ab11-f49a067ec298\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.407087 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-scripts\") pod 
\"3892f567-eaba-40b3-ab11-f49a067ec298\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.407260 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3892f567-eaba-40b3-ab11-f49a067ec298-etc-swift\") pod \"3892f567-eaba-40b3-ab11-f49a067ec298\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.407422 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8p99\" (UniqueName: \"kubernetes.io/projected/3892f567-eaba-40b3-ab11-f49a067ec298-kube-api-access-l8p99\") pod \"3892f567-eaba-40b3-ab11-f49a067ec298\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.407553 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-combined-ca-bundle\") pod \"3892f567-eaba-40b3-ab11-f49a067ec298\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.407788 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-swiftconf\") pod \"3892f567-eaba-40b3-ab11-f49a067ec298\" (UID: \"3892f567-eaba-40b3-ab11-f49a067ec298\") " Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.408210 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-dns-svc\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.408398 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-config\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.410133 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxl4l\" (UniqueName: \"kubernetes.io/projected/90566558-9f0d-4487-a86c-4ef20464421b-kube-api-access-hxl4l\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.411150 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.411223 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-dns-svc\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.410180 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/3892f567-eaba-40b3-ab11-f49a067ec298-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3892f567-eaba-40b3-ab11-f49a067ec298" (UID: "3892f567-eaba-40b3-ab11-f49a067ec298"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.410772 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "3892f567-eaba-40b3-ab11-f49a067ec298" (UID: "3892f567-eaba-40b3-ab11-f49a067ec298"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.412196 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.413203 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.413792 4816 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.413919 4816 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3892f567-eaba-40b3-ab11-f49a067ec298-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.416764 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.417081 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-config\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.422783 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3892f567-eaba-40b3-ab11-f49a067ec298-kube-api-access-l8p99" (OuterVolumeSpecName: "kube-api-access-l8p99") pod "3892f567-eaba-40b3-ab11-f49a067ec298" (UID: "3892f567-eaba-40b3-ab11-f49a067ec298"). InnerVolumeSpecName "kube-api-access-l8p99". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.433269 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxl4l\" (UniqueName: \"kubernetes.io/projected/90566558-9f0d-4487-a86c-4ef20464421b-kube-api-access-hxl4l\") pod \"dnsmasq-dns-74dc88fc-2qvgd\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.456889 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "3892f567-eaba-40b3-ab11-f49a067ec298" (UID: "3892f567-eaba-40b3-ab11-f49a067ec298"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.469353 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-scripts" (OuterVolumeSpecName: "scripts") pod "3892f567-eaba-40b3-ab11-f49a067ec298" (UID: "3892f567-eaba-40b3-ab11-f49a067ec298"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.475862 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3892f567-eaba-40b3-ab11-f49a067ec298" (UID: "3892f567-eaba-40b3-ab11-f49a067ec298"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.478578 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q-config-hs65x" event={"ID":"13dd3829-afd7-421b-8caf-6f789f71fc25","Type":"ContainerStarted","Data":"a6ac6b8dc2ef2165aff01bca95b68b5051f9f9d498db439bff291361cbbab224"} Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.486321 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mnq5r" event={"ID":"3892f567-eaba-40b3-ab11-f49a067ec298","Type":"ContainerDied","Data":"b282c49fdb5c9d80244bd08204b08512534519a4a0fcbf10ecfeb2e80baee91e"} Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.486390 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b282c49fdb5c9d80244bd08204b08512534519a4a0fcbf10ecfeb2e80baee91e" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.486481 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mnq5r" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.493811 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "3892f567-eaba-40b3-ab11-f49a067ec298" (UID: "3892f567-eaba-40b3-ab11-f49a067ec298"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.516055 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8p99\" (UniqueName: \"kubernetes.io/projected/3892f567-eaba-40b3-ab11-f49a067ec298-kube-api-access-l8p99\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.516087 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.516097 4816 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.516105 4816 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3892f567-eaba-40b3-ab11-f49a067ec298-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.516114 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3892f567-eaba-40b3-ab11-f49a067ec298-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.531819 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.572573 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.617970 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-bqsq5"] Feb 16 13:22:18 crc kubenswrapper[4816]: E0216 13:22:18.618342 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3892f567-eaba-40b3-ab11-f49a067ec298" containerName="swift-ring-rebalance" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.618356 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3892f567-eaba-40b3-ab11-f49a067ec298" containerName="swift-ring-rebalance" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.618547 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3892f567-eaba-40b3-ab11-f49a067ec298" containerName="swift-ring-rebalance" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.623355 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.721287 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0d98c91-1333-411c-9307-90e0efd8005b-operator-scripts\") pod \"cinder-db-create-bqsq5\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.721406 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcc58\" (UniqueName: \"kubernetes.io/projected/f0d98c91-1333-411c-9307-90e0efd8005b-kube-api-access-dcc58\") pod \"cinder-db-create-bqsq5\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.723880 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bqsq5"] Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.823292 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcc58\" (UniqueName: \"kubernetes.io/projected/f0d98c91-1333-411c-9307-90e0efd8005b-kube-api-access-dcc58\") pod \"cinder-db-create-bqsq5\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.823881 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0d98c91-1333-411c-9307-90e0efd8005b-operator-scripts\") pod \"cinder-db-create-bqsq5\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.826222 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0d98c91-1333-411c-9307-90e0efd8005b-operator-scripts\") pod \"cinder-db-create-bqsq5\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.866034 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-7489-account-create-update-9p66g"] Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.867587 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.875465 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.888883 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcc58\" (UniqueName: \"kubernetes.io/projected/f0d98c91-1333-411c-9307-90e0efd8005b-kube-api-access-dcc58\") pod \"cinder-db-create-bqsq5\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.902614 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-7489-account-create-update-9p66g"] Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.926499 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60ac96a6-9ece-4d04-994a-70c576c82534-operator-scripts\") pod \"cinder-7489-account-create-update-9p66g\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.927328 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g2pp\" (UniqueName: \"kubernetes.io/projected/60ac96a6-9ece-4d04-994a-70c576c82534-kube-api-access-7g2pp\") pod \"cinder-7489-account-create-update-9p66g\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:18 crc kubenswrapper[4816]: I0216 13:22:18.957049 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.028720 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-g75rg"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.029982 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.030708 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60ac96a6-9ece-4d04-994a-70c576c82534-operator-scripts\") pod \"cinder-7489-account-create-update-9p66g\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.030790 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g2pp\" (UniqueName: \"kubernetes.io/projected/60ac96a6-9ece-4d04-994a-70c576c82534-kube-api-access-7g2pp\") pod \"cinder-7489-account-create-update-9p66g\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.031336 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60ac96a6-9ece-4d04-994a-70c576c82534-operator-scripts\") pod \"cinder-7489-account-create-update-9p66g\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.067565 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g75rg"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.087571 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g2pp\" (UniqueName: \"kubernetes.io/projected/60ac96a6-9ece-4d04-994a-70c576c82534-kube-api-access-7g2pp\") pod \"cinder-7489-account-create-update-9p66g\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.118854 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-njv4t"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.123012 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.137890 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r88b4\" (UniqueName: \"kubernetes.io/projected/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-kube-api-access-r88b4\") pod \"barbican-db-create-g75rg\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.138015 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-operator-scripts\") pod \"barbican-db-create-g75rg\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.209327 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-fc34-account-create-update-whvjm"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.211763 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.216169 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.224045 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-njv4t"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.233599 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-fc34-account-create-update-whvjm"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.240217 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r88b4\" (UniqueName: \"kubernetes.io/projected/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-kube-api-access-r88b4\") pod \"barbican-db-create-g75rg\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.240313 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eac035cf-66af-4b1a-9798-8e7a05975c7e-operator-scripts\") pod \"neutron-db-create-njv4t\" (UID: \"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.240337 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6tmr\" (UniqueName: \"kubernetes.io/projected/eac035cf-66af-4b1a-9798-8e7a05975c7e-kube-api-access-n6tmr\") pod \"neutron-db-create-njv4t\" (UID: \"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.240407 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-operator-scripts\") pod \"barbican-db-create-g75rg\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.241252 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-operator-scripts\") pod \"barbican-db-create-g75rg\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.241474 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.272616 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-d27p4"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.273888 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.276948 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.277764 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.277959 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zz64" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.278031 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 16 13:22:19 crc kubenswrapper[4816]: W0216 13:22:19.278104 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90566558_9f0d_4487_a86c_4ef20464421b.slice/crio-97b4d4304d126047f7d59373f0ca7fbbad3822c3cf9207820be2216bdfddfdee WatchSource:0}: Error finding container 97b4d4304d126047f7d59373f0ca7fbbad3822c3cf9207820be2216bdfddfdee: Status 404 returned error can't find the container with id 97b4d4304d126047f7d59373f0ca7fbbad3822c3cf9207820be2216bdfddfdee Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.279057 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-d27p4"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.285062 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e623-account-create-update-vfwn8"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.288637 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.291258 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e623-account-create-update-vfwn8"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.292063 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r88b4\" (UniqueName: \"kubernetes.io/projected/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-kube-api-access-r88b4\") pod \"barbican-db-create-g75rg\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.295616 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.304762 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2qvgd"] Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343140 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-operator-scripts\") pod \"neutron-e623-account-create-update-vfwn8\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343191 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/608922e5-2248-48a4-bd64-6a0a508ddf23-operator-scripts\") pod \"barbican-fc34-account-create-update-whvjm\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 
16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343218 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rwgv\" (UniqueName: \"kubernetes.io/projected/608922e5-2248-48a4-bd64-6a0a508ddf23-kube-api-access-6rwgv\") pod \"barbican-fc34-account-create-update-whvjm\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343362 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eac035cf-66af-4b1a-9798-8e7a05975c7e-operator-scripts\") pod \"neutron-db-create-njv4t\" (UID: \"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6tmr\" (UniqueName: \"kubernetes.io/projected/eac035cf-66af-4b1a-9798-8e7a05975c7e-kube-api-access-n6tmr\") pod \"neutron-db-create-njv4t\" (UID: \"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343506 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js874\" (UniqueName: \"kubernetes.io/projected/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-kube-api-access-js874\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343531 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-combined-ca-bundle\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343556 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5q7j\" (UniqueName: \"kubernetes.io/projected/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-kube-api-access-g5q7j\") pod \"neutron-e623-account-create-update-vfwn8\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.343598 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-config-data\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.344347 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eac035cf-66af-4b1a-9798-8e7a05975c7e-operator-scripts\") pod \"neutron-db-create-njv4t\" (UID: \"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.365227 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6tmr\" (UniqueName: \"kubernetes.io/projected/eac035cf-66af-4b1a-9798-8e7a05975c7e-kube-api-access-n6tmr\") pod \"neutron-db-create-njv4t\" (UID: 
\"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.444988 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js874\" (UniqueName: \"kubernetes.io/projected/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-kube-api-access-js874\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.445330 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-combined-ca-bundle\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.445355 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5q7j\" (UniqueName: \"kubernetes.io/projected/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-kube-api-access-g5q7j\") pod \"neutron-e623-account-create-update-vfwn8\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.445402 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-config-data\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.445443 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-operator-scripts\") pod \"neutron-e623-account-create-update-vfwn8\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.445467 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/608922e5-2248-48a4-bd64-6a0a508ddf23-operator-scripts\") pod \"barbican-fc34-account-create-update-whvjm\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.445488 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rwgv\" (UniqueName: \"kubernetes.io/projected/608922e5-2248-48a4-bd64-6a0a508ddf23-kube-api-access-6rwgv\") pod \"barbican-fc34-account-create-update-whvjm\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.446799 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-operator-scripts\") pod \"neutron-e623-account-create-update-vfwn8\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.446831 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/608922e5-2248-48a4-bd64-6a0a508ddf23-operator-scripts\") pod \"barbican-fc34-account-create-update-whvjm\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.450242 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.451078 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-combined-ca-bundle\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.458792 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-config-data\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.471564 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rwgv\" (UniqueName: \"kubernetes.io/projected/608922e5-2248-48a4-bd64-6a0a508ddf23-kube-api-access-6rwgv\") pod \"barbican-fc34-account-create-update-whvjm\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.476892 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5q7j\" (UniqueName: \"kubernetes.io/projected/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-kube-api-access-g5q7j\") pod \"neutron-e623-account-create-update-vfwn8\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.480478 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js874\" (UniqueName: \"kubernetes.io/projected/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-kube-api-access-js874\") pod \"keystone-db-sync-d27p4\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.490319 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.499607 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" event={"ID":"90566558-9f0d-4487-a86c-4ef20464421b","Type":"ContainerStarted","Data":"20780713f4d7a496883270efe7d7238976e8bc327c9fc83b7c99593d4d010628"} Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.499647 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" event={"ID":"90566558-9f0d-4487-a86c-4ef20464421b","Type":"ContainerStarted","Data":"97b4d4304d126047f7d59373f0ca7fbbad3822c3cf9207820be2216bdfddfdee"} Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.520145 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q-config-hs65x" event={"ID":"13dd3829-afd7-421b-8caf-6f789f71fc25","Type":"ContainerStarted","Data":"1b3d6bc3eedad7ccfb9c38488642bb42e53f8190c7302116b2a8895960a5652d"} Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.535362 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.595401 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:19 crc kubenswrapper[4816]: I0216 13:22:19.612448 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.259087 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-njv4t"] Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.271831 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-7489-account-create-update-9p66g"] Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.417948 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g75rg"] Feb 16 13:22:20 crc kubenswrapper[4816]: W0216 13:22:20.452536 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc95c4bfe_79a7_4485_ad1b_f76a3bcc6e39.slice/crio-f286dfaa7dcf6eef9f7fd5e181287288bd139f8f4acf51514cd1abf99af21e64 WatchSource:0}: Error finding container f286dfaa7dcf6eef9f7fd5e181287288bd139f8f4acf51514cd1abf99af21e64: Status 404 returned error can't find the container with id f286dfaa7dcf6eef9f7fd5e181287288bd139f8f4acf51514cd1abf99af21e64 Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.455298 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-d27p4"] Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.468377 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-fc34-account-create-update-whvjm"] Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.486124 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bqsq5"] Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.503279 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e623-account-create-update-vfwn8"] Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.545789 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g75rg" 
event={"ID":"b7aca2c9-78a1-48a0-b26c-b19a546eeeba","Type":"ContainerStarted","Data":"138ff394b6dc390828abdf5b21d0dc482ac2882a4759be794c016edebaf903cf"} Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.549005 4816 generic.go:334] "Generic (PLEG): container finished" podID="90566558-9f0d-4487-a86c-4ef20464421b" containerID="20780713f4d7a496883270efe7d7238976e8bc327c9fc83b7c99593d4d010628" exitCode=0 Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.549232 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" event={"ID":"90566558-9f0d-4487-a86c-4ef20464421b","Type":"ContainerDied","Data":"20780713f4d7a496883270efe7d7238976e8bc327c9fc83b7c99593d4d010628"} Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.552968 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7489-account-create-update-9p66g" event={"ID":"60ac96a6-9ece-4d04-994a-70c576c82534","Type":"ContainerStarted","Data":"97c5a20e70e26c26ea85e78f7e85d553331f044a9dedd7395928694b74e87b37"} Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.555676 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-njv4t" event={"ID":"eac035cf-66af-4b1a-9798-8e7a05975c7e","Type":"ContainerStarted","Data":"e5cb42f48d42f6808ec9068ed1a26099818c38ee1431636f456cd5835ca298bf"} Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.558280 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-d27p4" event={"ID":"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39","Type":"ContainerStarted","Data":"f286dfaa7dcf6eef9f7fd5e181287288bd139f8f4acf51514cd1abf99af21e64"} Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.562734 4816 generic.go:334] "Generic (PLEG): container finished" podID="13dd3829-afd7-421b-8caf-6f789f71fc25" containerID="1b3d6bc3eedad7ccfb9c38488642bb42e53f8190c7302116b2a8895960a5652d" exitCode=0 Feb 16 13:22:20 crc kubenswrapper[4816]: I0216 13:22:20.562771 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q-config-hs65x" event={"ID":"13dd3829-afd7-421b-8caf-6f789f71fc25","Type":"ContainerDied","Data":"1b3d6bc3eedad7ccfb9c38488642bb42e53f8190c7302116b2a8895960a5652d"} Feb 16 13:22:20 crc kubenswrapper[4816]: W0216 13:22:20.566329 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0d98c91_1333_411c_9307_90e0efd8005b.slice/crio-adaac17156ad742205b673410c0e1f7556644824dfbaab5586a7bc9aab72e9c2 WatchSource:0}: Error finding container adaac17156ad742205b673410c0e1f7556644824dfbaab5586a7bc9aab72e9c2: Status 404 returned error can't find the container with id adaac17156ad742205b673410c0e1f7556644824dfbaab5586a7bc9aab72e9c2 Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.131109 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.186432 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-additional-scripts\") pod \"13dd3829-afd7-421b-8caf-6f789f71fc25\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.186545 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run-ovn\") pod \"13dd3829-afd7-421b-8caf-6f789f71fc25\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.186636 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run\") pod \"13dd3829-afd7-421b-8caf-6f789f71fc25\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.186698 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-log-ovn\") pod \"13dd3829-afd7-421b-8caf-6f789f71fc25\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.186743 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ggkw\" (UniqueName: \"kubernetes.io/projected/13dd3829-afd7-421b-8caf-6f789f71fc25-kube-api-access-9ggkw\") pod \"13dd3829-afd7-421b-8caf-6f789f71fc25\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.186807 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-scripts\") pod \"13dd3829-afd7-421b-8caf-6f789f71fc25\" (UID: \"13dd3829-afd7-421b-8caf-6f789f71fc25\") " Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.189239 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-scripts" (OuterVolumeSpecName: "scripts") pod "13dd3829-afd7-421b-8caf-6f789f71fc25" (UID: "13dd3829-afd7-421b-8caf-6f789f71fc25"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.190354 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "13dd3829-afd7-421b-8caf-6f789f71fc25" (UID: "13dd3829-afd7-421b-8caf-6f789f71fc25"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.190410 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "13dd3829-afd7-421b-8caf-6f789f71fc25" (UID: "13dd3829-afd7-421b-8caf-6f789f71fc25"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.190444 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run" (OuterVolumeSpecName: "var-run") pod "13dd3829-afd7-421b-8caf-6f789f71fc25" (UID: "13dd3829-afd7-421b-8caf-6f789f71fc25"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.190470 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "13dd3829-afd7-421b-8caf-6f789f71fc25" (UID: "13dd3829-afd7-421b-8caf-6f789f71fc25"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.211287 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13dd3829-afd7-421b-8caf-6f789f71fc25-kube-api-access-9ggkw" (OuterVolumeSpecName: "kube-api-access-9ggkw") pod "13dd3829-afd7-421b-8caf-6f789f71fc25" (UID: "13dd3829-afd7-421b-8caf-6f789f71fc25"). InnerVolumeSpecName "kube-api-access-9ggkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.288783 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.288830 4816 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/13dd3829-afd7-421b-8caf-6f789f71fc25-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.288845 4816 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.288858 4816 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.288871 4816 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/13dd3829-afd7-421b-8caf-6f789f71fc25-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.288884 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ggkw\" (UniqueName: \"kubernetes.io/projected/13dd3829-afd7-421b-8caf-6f789f71fc25-kube-api-access-9ggkw\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.571936 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bqsq5" event={"ID":"f0d98c91-1333-411c-9307-90e0efd8005b","Type":"ContainerStarted","Data":"adaac17156ad742205b673410c0e1f7556644824dfbaab5586a7bc9aab72e9c2"} Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.573347 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-fc34-account-create-update-whvjm" 
event={"ID":"608922e5-2248-48a4-bd64-6a0a508ddf23","Type":"ContainerStarted","Data":"1b87411c031e3203ffde3639219b64bd3369c3f448e2d3171678b743602ba6ad"} Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.575003 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v9w6q-config-hs65x" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.575629 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q-config-hs65x" event={"ID":"13dd3829-afd7-421b-8caf-6f789f71fc25","Type":"ContainerDied","Data":"a6ac6b8dc2ef2165aff01bca95b68b5051f9f9d498db439bff291361cbbab224"} Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.575670 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6ac6b8dc2ef2165aff01bca95b68b5051f9f9d498db439bff291361cbbab224" Feb 16 13:22:21 crc kubenswrapper[4816]: I0216 13:22:21.578765 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e623-account-create-update-vfwn8" event={"ID":"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78","Type":"ContainerStarted","Data":"54f68a5c85e1cd3c0b1f394d1147eb138daf9753d1c402dd77d5ce52c31666aa"} Feb 16 13:22:22 crc kubenswrapper[4816]: I0216 13:22:22.258243 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-v9w6q-config-hs65x"] Feb 16 13:22:22 crc kubenswrapper[4816]: I0216 13:22:22.267258 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-v9w6q-config-hs65x"] Feb 16 13:22:23 crc kubenswrapper[4816]: I0216 13:22:23.411367 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13dd3829-afd7-421b-8caf-6f789f71fc25" path="/var/lib/kubelet/pods/13dd3829-afd7-421b-8caf-6f789f71fc25/volumes" Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.613465 4816 generic.go:334] "Generic (PLEG): container finished" podID="608922e5-2248-48a4-bd64-6a0a508ddf23" containerID="4bf380764ceb5ddaf487fb4521c9455b43fabb3c7c18c66b33792776e7c2f38e" exitCode=0 Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.614257 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-fc34-account-create-update-whvjm" event={"ID":"608922e5-2248-48a4-bd64-6a0a508ddf23","Type":"ContainerDied","Data":"4bf380764ceb5ddaf487fb4521c9455b43fabb3c7c18c66b33792776e7c2f38e"} Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.619241 4816 generic.go:334] "Generic (PLEG): container finished" podID="b7aca2c9-78a1-48a0-b26c-b19a546eeeba" containerID="c983d929214d9a1e3bd142f2e8f7bf5e969c9d077da70020eb59bde75ca3eb44" exitCode=0 Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.619350 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g75rg" event={"ID":"b7aca2c9-78a1-48a0-b26c-b19a546eeeba","Type":"ContainerDied","Data":"c983d929214d9a1e3bd142f2e8f7bf5e969c9d077da70020eb59bde75ca3eb44"} Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.621773 4816 generic.go:334] "Generic (PLEG): container finished" podID="f66acf8f-1ec5-4f7a-aff9-2511af6e9d78" containerID="004c0da5fb3afa4a342ed9b071e640e332b3652bacb749a155ade0c4ff13924a" exitCode=0 Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.621798 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e623-account-create-update-vfwn8" event={"ID":"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78","Type":"ContainerDied","Data":"004c0da5fb3afa4a342ed9b071e640e332b3652bacb749a155ade0c4ff13924a"} Feb 16 13:22:24 crc 
kubenswrapper[4816]: I0216 13:22:24.624328 4816 generic.go:334] "Generic (PLEG): container finished" podID="f0d98c91-1333-411c-9307-90e0efd8005b" containerID="0c593b579f73184d42db2241d7d922c67c9bac01382ab293a7dabf842695c8bf" exitCode=0 Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.624381 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bqsq5" event={"ID":"f0d98c91-1333-411c-9307-90e0efd8005b","Type":"ContainerDied","Data":"0c593b579f73184d42db2241d7d922c67c9bac01382ab293a7dabf842695c8bf"} Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.638112 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" event={"ID":"90566558-9f0d-4487-a86c-4ef20464421b","Type":"ContainerStarted","Data":"fdbc95b7458ad30ac590cacf9be1c0879275975b764f1daeae329a1db2e0aa82"} Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.638744 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.640923 4816 generic.go:334] "Generic (PLEG): container finished" podID="60ac96a6-9ece-4d04-994a-70c576c82534" containerID="e63f76aba075ce81a3ca07eb9f040953dc5a9cadaac3012c7e2ddbbae73f9f45" exitCode=0 Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.641029 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7489-account-create-update-9p66g" event={"ID":"60ac96a6-9ece-4d04-994a-70c576c82534","Type":"ContainerDied","Data":"e63f76aba075ce81a3ca07eb9f040953dc5a9cadaac3012c7e2ddbbae73f9f45"} Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.643427 4816 generic.go:334] "Generic (PLEG): container finished" podID="eac035cf-66af-4b1a-9798-8e7a05975c7e" containerID="feea9035d59c55445bcca93b4b1d6b7b8ed4d1f3147f21bda9862dd9c1e1b9f5" exitCode=0 Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.643493 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-njv4t" event={"ID":"eac035cf-66af-4b1a-9798-8e7a05975c7e","Type":"ContainerDied","Data":"feea9035d59c55445bcca93b4b1d6b7b8ed4d1f3147f21bda9862dd9c1e1b9f5"} Feb 16 13:22:24 crc kubenswrapper[4816]: I0216 13:22:24.729969 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" podStartSLOduration=6.729946013 podStartE2EDuration="6.729946013s" podCreationTimestamp="2026-02-16 13:22:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:24.717643696 +0000 UTC m=+1144.044357424" watchObservedRunningTime="2026-02-16 13:22:24.729946013 +0000 UTC m=+1144.056659741" Feb 16 13:22:27 crc kubenswrapper[4816]: I0216 13:22:27.418254 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:22:27 crc kubenswrapper[4816]: I0216 13:22:27.427807 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"swift-storage-0\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") " pod="openstack/swift-storage-0" Feb 16 13:22:27 crc kubenswrapper[4816]: I0216 13:22:27.519740 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Feb 16 13:22:27 crc kubenswrapper[4816]: E0216 13:22:27.912403 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13dd3829_afd7_421b_8caf_6f789f71fc25.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.040692 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.048027 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.051445 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.056736 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134294 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0d98c91-1333-411c-9307-90e0efd8005b-operator-scripts\") pod \"f0d98c91-1333-411c-9307-90e0efd8005b\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134361 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g2pp\" (UniqueName: \"kubernetes.io/projected/60ac96a6-9ece-4d04-994a-70c576c82534-kube-api-access-7g2pp\") pod \"60ac96a6-9ece-4d04-994a-70c576c82534\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134426 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5q7j\" (UniqueName: \"kubernetes.io/projected/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-kube-api-access-g5q7j\") pod \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134451 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcc58\" (UniqueName: \"kubernetes.io/projected/f0d98c91-1333-411c-9307-90e0efd8005b-kube-api-access-dcc58\") pod \"f0d98c91-1333-411c-9307-90e0efd8005b\" (UID: \"f0d98c91-1333-411c-9307-90e0efd8005b\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134467 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eac035cf-66af-4b1a-9798-8e7a05975c7e-operator-scripts\") pod \"eac035cf-66af-4b1a-9798-8e7a05975c7e\" (UID: \"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134486 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6tmr\" (UniqueName: \"kubernetes.io/projected/eac035cf-66af-4b1a-9798-8e7a05975c7e-kube-api-access-n6tmr\") pod \"eac035cf-66af-4b1a-9798-8e7a05975c7e\" (UID: \"eac035cf-66af-4b1a-9798-8e7a05975c7e\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134511 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-operator-scripts\") pod \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\" (UID: \"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.134589 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60ac96a6-9ece-4d04-994a-70c576c82534-operator-scripts\") pod \"60ac96a6-9ece-4d04-994a-70c576c82534\" (UID: \"60ac96a6-9ece-4d04-994a-70c576c82534\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.135410 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60ac96a6-9ece-4d04-994a-70c576c82534-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "60ac96a6-9ece-4d04-994a-70c576c82534" (UID: "60ac96a6-9ece-4d04-994a-70c576c82534"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.135794 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eac035cf-66af-4b1a-9798-8e7a05975c7e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eac035cf-66af-4b1a-9798-8e7a05975c7e" (UID: "eac035cf-66af-4b1a-9798-8e7a05975c7e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.137179 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0d98c91-1333-411c-9307-90e0efd8005b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f0d98c91-1333-411c-9307-90e0efd8005b" (UID: "f0d98c91-1333-411c-9307-90e0efd8005b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.137299 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f66acf8f-1ec5-4f7a-aff9-2511af6e9d78" (UID: "f66acf8f-1ec5-4f7a-aff9-2511af6e9d78"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.139891 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ac96a6-9ece-4d04-994a-70c576c82534-kube-api-access-7g2pp" (OuterVolumeSpecName: "kube-api-access-7g2pp") pod "60ac96a6-9ece-4d04-994a-70c576c82534" (UID: "60ac96a6-9ece-4d04-994a-70c576c82534"). InnerVolumeSpecName "kube-api-access-7g2pp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.140020 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eac035cf-66af-4b1a-9798-8e7a05975c7e-kube-api-access-n6tmr" (OuterVolumeSpecName: "kube-api-access-n6tmr") pod "eac035cf-66af-4b1a-9798-8e7a05975c7e" (UID: "eac035cf-66af-4b1a-9798-8e7a05975c7e"). InnerVolumeSpecName "kube-api-access-n6tmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.140941 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.141504 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0d98c91-1333-411c-9307-90e0efd8005b-kube-api-access-dcc58" (OuterVolumeSpecName: "kube-api-access-dcc58") pod "f0d98c91-1333-411c-9307-90e0efd8005b" (UID: "f0d98c91-1333-411c-9307-90e0efd8005b"). InnerVolumeSpecName "kube-api-access-dcc58". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.142355 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-kube-api-access-g5q7j" (OuterVolumeSpecName: "kube-api-access-g5q7j") pod "f66acf8f-1ec5-4f7a-aff9-2511af6e9d78" (UID: "f66acf8f-1ec5-4f7a-aff9-2511af6e9d78"). InnerVolumeSpecName "kube-api-access-g5q7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.144649 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.236515 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rwgv\" (UniqueName: \"kubernetes.io/projected/608922e5-2248-48a4-bd64-6a0a508ddf23-kube-api-access-6rwgv\") pod \"608922e5-2248-48a4-bd64-6a0a508ddf23\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.236718 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-operator-scripts\") pod \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.236804 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/608922e5-2248-48a4-bd64-6a0a508ddf23-operator-scripts\") pod \"608922e5-2248-48a4-bd64-6a0a508ddf23\" (UID: \"608922e5-2248-48a4-bd64-6a0a508ddf23\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.236880 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r88b4\" (UniqueName: \"kubernetes.io/projected/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-kube-api-access-r88b4\") pod \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\" (UID: \"b7aca2c9-78a1-48a0-b26c-b19a546eeeba\") " Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237524 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237558 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60ac96a6-9ece-4d04-994a-70c576c82534-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237572 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0d98c91-1333-411c-9307-90e0efd8005b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237587 4816 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g2pp\" (UniqueName: \"kubernetes.io/projected/60ac96a6-9ece-4d04-994a-70c576c82534-kube-api-access-7g2pp\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237601 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5q7j\" (UniqueName: \"kubernetes.io/projected/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78-kube-api-access-g5q7j\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237612 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcc58\" (UniqueName: \"kubernetes.io/projected/f0d98c91-1333-411c-9307-90e0efd8005b-kube-api-access-dcc58\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237624 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eac035cf-66af-4b1a-9798-8e7a05975c7e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237636 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6tmr\" (UniqueName: \"kubernetes.io/projected/eac035cf-66af-4b1a-9798-8e7a05975c7e-kube-api-access-n6tmr\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237673 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b7aca2c9-78a1-48a0-b26c-b19a546eeeba" (UID: "b7aca2c9-78a1-48a0-b26c-b19a546eeeba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.237943 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/608922e5-2248-48a4-bd64-6a0a508ddf23-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "608922e5-2248-48a4-bd64-6a0a508ddf23" (UID: "608922e5-2248-48a4-bd64-6a0a508ddf23"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.242219 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/608922e5-2248-48a4-bd64-6a0a508ddf23-kube-api-access-6rwgv" (OuterVolumeSpecName: "kube-api-access-6rwgv") pod "608922e5-2248-48a4-bd64-6a0a508ddf23" (UID: "608922e5-2248-48a4-bd64-6a0a508ddf23"). InnerVolumeSpecName "kube-api-access-6rwgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.242919 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-kube-api-access-r88b4" (OuterVolumeSpecName: "kube-api-access-r88b4") pod "b7aca2c9-78a1-48a0-b26c-b19a546eeeba" (UID: "b7aca2c9-78a1-48a0-b26c-b19a546eeeba"). InnerVolumeSpecName "kube-api-access-r88b4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.339691 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r88b4\" (UniqueName: \"kubernetes.io/projected/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-kube-api-access-r88b4\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.339720 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rwgv\" (UniqueName: \"kubernetes.io/projected/608922e5-2248-48a4-bd64-6a0a508ddf23-kube-api-access-6rwgv\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.339745 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7aca2c9-78a1-48a0-b26c-b19a546eeeba-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.339754 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/608922e5-2248-48a4-bd64-6a0a508ddf23-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.574943 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:28 crc kubenswrapper[4816]: W0216 13:22:28.581885 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod809ff1b2_f365_4513_89a1_aed781f4b4aa.slice/crio-b9ba6f9e177ade30a6e030412df1bef59d77c7d7e14b218e8a5db7aa6b066631 WatchSource:0}: Error finding container b9ba6f9e177ade30a6e030412df1bef59d77c7d7e14b218e8a5db7aa6b066631: Status 404 returned error can't find the container with id b9ba6f9e177ade30a6e030412df1bef59d77c7d7e14b218e8a5db7aa6b066631 Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.584507 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.652302 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-66gzc"] Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.652649 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" podUID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerName="dnsmasq-dns" containerID="cri-o://b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8" gracePeriod=10 Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.684319 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bqsq5" event={"ID":"f0d98c91-1333-411c-9307-90e0efd8005b","Type":"ContainerDied","Data":"adaac17156ad742205b673410c0e1f7556644824dfbaab5586a7bc9aab72e9c2"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.684357 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adaac17156ad742205b673410c0e1f7556644824dfbaab5586a7bc9aab72e9c2" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.684461 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-bqsq5" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.693397 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7489-account-create-update-9p66g" event={"ID":"60ac96a6-9ece-4d04-994a-70c576c82534","Type":"ContainerDied","Data":"97c5a20e70e26c26ea85e78f7e85d553331f044a9dedd7395928694b74e87b37"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.693422 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97c5a20e70e26c26ea85e78f7e85d553331f044a9dedd7395928694b74e87b37" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.693494 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7489-account-create-update-9p66g" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.710593 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-njv4t" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.710592 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-njv4t" event={"ID":"eac035cf-66af-4b1a-9798-8e7a05975c7e","Type":"ContainerDied","Data":"e5cb42f48d42f6808ec9068ed1a26099818c38ee1431636f456cd5835ca298bf"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.711780 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5cb42f48d42f6808ec9068ed1a26099818c38ee1431636f456cd5835ca298bf" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.719248 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-d27p4" event={"ID":"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39","Type":"ContainerStarted","Data":"e5420356a58f2a742209fa93eeb74cfe936d1655956ca551c31cf64d9fc74339"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.722945 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-fc34-account-create-update-whvjm" event={"ID":"608922e5-2248-48a4-bd64-6a0a508ddf23","Type":"ContainerDied","Data":"1b87411c031e3203ffde3639219b64bd3369c3f448e2d3171678b743602ba6ad"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.723010 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b87411c031e3203ffde3639219b64bd3369c3f448e2d3171678b743602ba6ad" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.723090 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-fc34-account-create-update-whvjm" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.726146 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g75rg" event={"ID":"b7aca2c9-78a1-48a0-b26c-b19a546eeeba","Type":"ContainerDied","Data":"138ff394b6dc390828abdf5b21d0dc482ac2882a4759be794c016edebaf903cf"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.726212 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="138ff394b6dc390828abdf5b21d0dc482ac2882a4759be794c016edebaf903cf" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.726283 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-g75rg" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.730048 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"b9ba6f9e177ade30a6e030412df1bef59d77c7d7e14b218e8a5db7aa6b066631"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.737805 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e623-account-create-update-vfwn8" event={"ID":"f66acf8f-1ec5-4f7a-aff9-2511af6e9d78","Type":"ContainerDied","Data":"54f68a5c85e1cd3c0b1f394d1147eb138daf9753d1c402dd77d5ce52c31666aa"} Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.737869 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54f68a5c85e1cd3c0b1f394d1147eb138daf9753d1c402dd77d5ce52c31666aa" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.737946 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e623-account-create-update-vfwn8" Feb 16 13:22:28 crc kubenswrapper[4816]: I0216 13:22:28.777129 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-d27p4" podStartSLOduration=2.314625269 podStartE2EDuration="9.777105725s" podCreationTimestamp="2026-02-16 13:22:19 +0000 UTC" firstStartedPulling="2026-02-16 13:22:20.480934362 +0000 UTC m=+1139.807648090" lastFinishedPulling="2026-02-16 13:22:27.943414818 +0000 UTC m=+1147.270128546" observedRunningTime="2026-02-16 13:22:28.736783052 +0000 UTC m=+1148.063496780" watchObservedRunningTime="2026-02-16 13:22:28.777105725 +0000 UTC m=+1148.103819453" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.163174 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.274854 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-sb\") pod \"ff43b918-77f6-4472-ab31-01aebee3adaa\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.274994 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-dns-svc\") pod \"ff43b918-77f6-4472-ab31-01aebee3adaa\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.275047 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7t45\" (UniqueName: \"kubernetes.io/projected/ff43b918-77f6-4472-ab31-01aebee3adaa-kube-api-access-r7t45\") pod \"ff43b918-77f6-4472-ab31-01aebee3adaa\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.275122 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-nb\") pod \"ff43b918-77f6-4472-ab31-01aebee3adaa\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.275167 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-config\") pod \"ff43b918-77f6-4472-ab31-01aebee3adaa\" (UID: \"ff43b918-77f6-4472-ab31-01aebee3adaa\") " Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.281471 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff43b918-77f6-4472-ab31-01aebee3adaa-kube-api-access-r7t45" (OuterVolumeSpecName: "kube-api-access-r7t45") pod "ff43b918-77f6-4472-ab31-01aebee3adaa" (UID: "ff43b918-77f6-4472-ab31-01aebee3adaa"). InnerVolumeSpecName "kube-api-access-r7t45". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.313510 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ff43b918-77f6-4472-ab31-01aebee3adaa" (UID: "ff43b918-77f6-4472-ab31-01aebee3adaa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.313810 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ff43b918-77f6-4472-ab31-01aebee3adaa" (UID: "ff43b918-77f6-4472-ab31-01aebee3adaa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.314959 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-config" (OuterVolumeSpecName: "config") pod "ff43b918-77f6-4472-ab31-01aebee3adaa" (UID: "ff43b918-77f6-4472-ab31-01aebee3adaa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.330817 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ff43b918-77f6-4472-ab31-01aebee3adaa" (UID: "ff43b918-77f6-4472-ab31-01aebee3adaa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.377282 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.377316 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7t45\" (UniqueName: \"kubernetes.io/projected/ff43b918-77f6-4472-ab31-01aebee3adaa-kube-api-access-r7t45\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.377326 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.377335 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.377346 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff43b918-77f6-4472-ab31-01aebee3adaa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.751299 4816 generic.go:334] "Generic (PLEG): container finished" podID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerID="b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8" exitCode=0 Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.751794 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" event={"ID":"ff43b918-77f6-4472-ab31-01aebee3adaa","Type":"ContainerDied","Data":"b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8"} Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.751833 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" event={"ID":"ff43b918-77f6-4472-ab31-01aebee3adaa","Type":"ContainerDied","Data":"2e760c30f77b60b5a0b3854afe5383e4a9b29ebd03439bbe148ed46c3ebc0240"} Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.751851 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-66gzc" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.751855 4816 scope.go:117] "RemoveContainer" containerID="b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.843045 4816 scope.go:117] "RemoveContainer" containerID="0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.873806 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-66gzc"] Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.876849 4816 scope.go:117] "RemoveContainer" containerID="b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8" Feb 16 13:22:29 crc kubenswrapper[4816]: E0216 13:22:29.877252 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8\": container with ID starting with b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8 not found: ID does not exist" containerID="b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.877298 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8"} err="failed to get container status \"b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8\": rpc error: code = NotFound desc = could not find container \"b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8\": container with ID starting with b76040bfeb697b4b48ba7c1f2fde89216bc49ecdf41c6f2319be5eac738ed8b8 not found: ID does not exist" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.877324 4816 scope.go:117] "RemoveContainer" containerID="0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad" Feb 16 13:22:29 crc kubenswrapper[4816]: E0216 13:22:29.877616 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad\": container with ID starting with 0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad not found: ID does not exist" containerID="0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.877683 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad"} err="failed to get container status \"0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad\": rpc error: code = NotFound desc = could not find container \"0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad\": container with ID starting with 0496ec4a84703c4ba6862862569b69c3f1c3bc7b214d4f7c84bb5cee22d0efad not found: ID does not exist" Feb 16 13:22:29 crc kubenswrapper[4816]: I0216 13:22:29.882836 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-66gzc"] Feb 16 13:22:30 crc kubenswrapper[4816]: I0216 13:22:30.764325 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958"} Feb 16 13:22:30 crc 
kubenswrapper[4816]: I0216 13:22:30.764863 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055"} Feb 16 13:22:30 crc kubenswrapper[4816]: I0216 13:22:30.764881 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0"} Feb 16 13:22:30 crc kubenswrapper[4816]: I0216 13:22:30.764892 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba"} Feb 16 13:22:31 crc kubenswrapper[4816]: I0216 13:22:31.413319 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff43b918-77f6-4472-ab31-01aebee3adaa" path="/var/lib/kubelet/pods/ff43b918-77f6-4472-ab31-01aebee3adaa/volumes" Feb 16 13:22:31 crc kubenswrapper[4816]: I0216 13:22:31.795160 4816 generic.go:334] "Generic (PLEG): container finished" podID="c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" containerID="e5420356a58f2a742209fa93eeb74cfe936d1655956ca551c31cf64d9fc74339" exitCode=0 Feb 16 13:22:31 crc kubenswrapper[4816]: I0216 13:22:31.795257 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-d27p4" event={"ID":"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39","Type":"ContainerDied","Data":"e5420356a58f2a742209fa93eeb74cfe936d1655956ca551c31cf64d9fc74339"} Feb 16 13:22:31 crc kubenswrapper[4816]: I0216 13:22:31.800020 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df"} Feb 16 13:22:32 crc kubenswrapper[4816]: I0216 13:22:32.819745 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66"} Feb 16 13:22:32 crc kubenswrapper[4816]: I0216 13:22:32.820175 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363"} Feb 16 13:22:32 crc kubenswrapper[4816]: I0216 13:22:32.820201 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad"} Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.334426 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.465860 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-combined-ca-bundle\") pod \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.466017 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-config-data\") pod \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.466126 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js874\" (UniqueName: \"kubernetes.io/projected/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-kube-api-access-js874\") pod \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\" (UID: \"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39\") " Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.470177 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-kube-api-access-js874" (OuterVolumeSpecName: "kube-api-access-js874") pod "c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" (UID: "c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39"). InnerVolumeSpecName "kube-api-access-js874". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.499062 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" (UID: "c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.518132 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-config-data" (OuterVolumeSpecName: "config-data") pod "c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" (UID: "c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.568437 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js874\" (UniqueName: \"kubernetes.io/projected/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-kube-api-access-js874\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.568460 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.568471 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.838359 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-d27p4" event={"ID":"c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39","Type":"ContainerDied","Data":"f286dfaa7dcf6eef9f7fd5e181287288bd139f8f4acf51514cd1abf99af21e64"} Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.838871 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-d27p4" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.838882 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f286dfaa7dcf6eef9f7fd5e181287288bd139f8f4acf51514cd1abf99af21e64" Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.850975 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5"} Feb 16 13:22:33 crc kubenswrapper[4816]: I0216 13:22:33.851015 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71"} Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.026762 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-7rql6"] Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027056 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerName="dnsmasq-dns" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027067 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerName="dnsmasq-dns" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027083 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d98c91-1333-411c-9307-90e0efd8005b" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027089 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0d98c91-1333-411c-9307-90e0efd8005b" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027103 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerName="init" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027109 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerName="init" Feb 16 13:22:34 crc 
kubenswrapper[4816]: E0216 13:22:34.027120 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66acf8f-1ec5-4f7a-aff9-2511af6e9d78" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027126 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66acf8f-1ec5-4f7a-aff9-2511af6e9d78" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027137 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ac96a6-9ece-4d04-994a-70c576c82534" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027143 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ac96a6-9ece-4d04-994a-70c576c82534" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027152 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" containerName="keystone-db-sync" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027158 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" containerName="keystone-db-sync" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027166 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="608922e5-2248-48a4-bd64-6a0a508ddf23" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027172 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="608922e5-2248-48a4-bd64-6a0a508ddf23" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027179 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7aca2c9-78a1-48a0-b26c-b19a546eeeba" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027185 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7aca2c9-78a1-48a0-b26c-b19a546eeeba" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027194 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13dd3829-afd7-421b-8caf-6f789f71fc25" containerName="ovn-config" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027200 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="13dd3829-afd7-421b-8caf-6f789f71fc25" containerName="ovn-config" Feb 16 13:22:34 crc kubenswrapper[4816]: E0216 13:22:34.027210 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eac035cf-66af-4b1a-9798-8e7a05975c7e" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027216 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="eac035cf-66af-4b1a-9798-8e7a05975c7e" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027385 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff43b918-77f6-4472-ab31-01aebee3adaa" containerName="dnsmasq-dns" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027400 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66acf8f-1ec5-4f7a-aff9-2511af6e9d78" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027409 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ac96a6-9ece-4d04-994a-70c576c82534" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 
13:22:34.027422 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7aca2c9-78a1-48a0-b26c-b19a546eeeba" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027436 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" containerName="keystone-db-sync" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027445 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="608922e5-2248-48a4-bd64-6a0a508ddf23" containerName="mariadb-account-create-update" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027452 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="eac035cf-66af-4b1a-9798-8e7a05975c7e" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027462 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="13dd3829-afd7-421b-8caf-6f789f71fc25" containerName="ovn-config" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.027480 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0d98c91-1333-411c-9307-90e0efd8005b" containerName="mariadb-database-create" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.041309 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.049790 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-7rql6"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.060671 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-kkssq"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.062495 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.087552 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.091175 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.106184 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zz64" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.106535 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.106565 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.129181 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kkssq"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199371 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g585g\" (UniqueName: \"kubernetes.io/projected/c978dd63-9f24-4c9b-9fce-2c807372eae6-kube-api-access-g585g\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199447 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-fernet-keys\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199489 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbg56\" (UniqueName: \"kubernetes.io/projected/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-kube-api-access-dbg56\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199538 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-combined-ca-bundle\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199581 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-sb\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199611 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-scripts\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199717 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-config-data\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.199986 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-credential-keys\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.200022 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-config\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.200048 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-nb\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.200101 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-dns-svc\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.301675 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-config-data\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.301820 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-credential-keys\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.301847 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-config\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.301887 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-nb\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.301923 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-dns-svc\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.301959 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g585g\" (UniqueName: \"kubernetes.io/projected/c978dd63-9f24-4c9b-9fce-2c807372eae6-kube-api-access-g585g\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.301982 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-fernet-keys\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.302003 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbg56\" (UniqueName: \"kubernetes.io/projected/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-kube-api-access-dbg56\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.302035 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-combined-ca-bundle\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.302061 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-sb\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.302088 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-scripts\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.303389 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-dns-svc\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.304312 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-config\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.305060 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-sb\") pod 
\"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.305225 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-nb\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.312891 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-combined-ca-bundle\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.312966 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-scripts\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.313067 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-config-data\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.315513 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-credential-keys\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.318289 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-fernet-keys\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.333386 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbg56\" (UniqueName: \"kubernetes.io/projected/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-kube-api-access-dbg56\") pod \"keystone-bootstrap-kkssq\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.365869 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g585g\" (UniqueName: \"kubernetes.io/projected/c978dd63-9f24-4c9b-9fce-2c807372eae6-kube-api-access-g585g\") pod \"dnsmasq-dns-7d5679f497-7rql6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.419633 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.446119 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.458484 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.461741 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.470242 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.470738 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.603041 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.636242 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-config-data\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.636341 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-run-httpd\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.636371 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj2k5\" (UniqueName: \"kubernetes.io/projected/860e76e2-3fc6-4b66-8bb2-2e377153c53b-kube-api-access-lj2k5\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.636401 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-log-httpd\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.636425 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.636469 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-scripts\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.636512 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.692967 4816 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-sqvh5"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.694735 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.711469 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.711697 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.711772 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-6vjhh" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739308 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-scripts\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739582 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af6d642c-ddbc-4faa-8871-ad5556ff1a64-etc-machine-id\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739679 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739724 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-config-data\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739747 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-config-data\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739768 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-db-sync-config-data\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739831 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-scripts\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739860 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-run-httpd\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739886 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgcb2\" (UniqueName: \"kubernetes.io/projected/af6d642c-ddbc-4faa-8871-ad5556ff1a64-kube-api-access-rgcb2\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739926 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj2k5\" (UniqueName: \"kubernetes.io/projected/860e76e2-3fc6-4b66-8bb2-2e377153c53b-kube-api-access-lj2k5\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739958 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-log-httpd\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.739984 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-combined-ca-bundle\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.740009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.745290 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-log-httpd\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.749446 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-run-httpd\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.762806 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-config-data\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.766641 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.796347 4816 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-scripts\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.798005 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.798680 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sqvh5"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.842796 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af6d642c-ddbc-4faa-8871-ad5556ff1a64-etc-machine-id\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.842916 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-config-data\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.842940 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-db-sync-config-data\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.843004 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-scripts\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.843028 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgcb2\" (UniqueName: \"kubernetes.io/projected/af6d642c-ddbc-4faa-8871-ad5556ff1a64-kube-api-access-rgcb2\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.843095 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-combined-ca-bundle\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.844605 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af6d642c-ddbc-4faa-8871-ad5556ff1a64-etc-machine-id\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.863234 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-combined-ca-bundle\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.864372 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj2k5\" (UniqueName: \"kubernetes.io/projected/860e76e2-3fc6-4b66-8bb2-2e377153c53b-kube-api-access-lj2k5\") pod \"ceilometer-0\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") " pod="openstack/ceilometer-0" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.868216 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-db-sync-config-data\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.909761 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-config-data\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.921363 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-scripts\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.921882 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgcb2\" (UniqueName: \"kubernetes.io/projected/af6d642c-ddbc-4faa-8871-ad5556ff1a64-kube-api-access-rgcb2\") pod \"cinder-db-sync-sqvh5\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") " pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.953727 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-4sts7"] Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.954861 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.963256 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-lmgvm" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.963472 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.963606 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.997216 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856"} Feb 16 13:22:34 crc kubenswrapper[4816]: I0216 13:22:34.997260 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c"} Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.001030 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.016924 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-7rql6"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.050725 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-config-data\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.050780 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5wph\" (UniqueName: \"kubernetes.io/projected/58201649-b6df-4c32-a1c2-b672eefca745-kube-api-access-c5wph\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.050872 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-scripts\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.050894 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58201649-b6df-4c32-a1c2-b672eefca745-logs\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.050958 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-combined-ca-bundle\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.072099 4816 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4sts7"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.110761 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-6nfpn"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.112410 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.121537 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.121724 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2t2s8" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.121824 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.126645 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6nfpn"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.140218 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56798b757f-q8g6p"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.141487 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.144492 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sqvh5" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.156828 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-scripts\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.156865 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-config\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.156883 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58201649-b6df-4c32-a1c2-b672eefca745-logs\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.156932 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v88p4\" (UniqueName: \"kubernetes.io/projected/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-kube-api-access-v88p4\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.156955 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-combined-ca-bundle\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.157004 
4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-config-data\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.157025 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5wph\" (UniqueName: \"kubernetes.io/projected/58201649-b6df-4c32-a1c2-b672eefca745-kube-api-access-c5wph\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.157043 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-combined-ca-bundle\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.157844 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58201649-b6df-4c32-a1c2-b672eefca745-logs\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.184345 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-combined-ca-bundle\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.184552 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5wph\" (UniqueName: \"kubernetes.io/projected/58201649-b6df-4c32-a1c2-b672eefca745-kube-api-access-c5wph\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.193530 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-q8g6p"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.195761 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-scripts\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.196395 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-config-data\") pod \"placement-db-sync-4sts7\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.205733 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-8mdth"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.229222 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-8mdth"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.229336 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.233588 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-lwnff" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.233937 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.257120 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kkssq"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259331 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88p4\" (UniqueName: \"kubernetes.io/projected/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-kube-api-access-v88p4\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259422 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-dns-svc\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259474 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-db-sync-config-data\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259510 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-combined-ca-bundle\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259531 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259574 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259607 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.259630 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-combined-ca-bundle\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.263601 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-config\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.263676 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xzhw\" (UniqueName: \"kubernetes.io/projected/0731f364-690d-4bf9-a86f-cbaa984c62c8-kube-api-access-8xzhw\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.263705 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt7lp\" (UniqueName: \"kubernetes.io/projected/8b407ed1-1b79-41c4-862f-589df8b95a09-kube-api-access-tt7lp\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.273576 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-config\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: W0216 13:22:35.286521 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda89df6fa_b1ec_43f3_9a69_5ec89ecde79e.slice/crio-df017dcc3cde0761325d4200625d8bc64adb320afb72fc3456b554d4c86daadb WatchSource:0}: Error finding container df017dcc3cde0761325d4200625d8bc64adb320afb72fc3456b554d4c86daadb: Status 404 returned error can't find the container with id df017dcc3cde0761325d4200625d8bc64adb320afb72fc3456b554d4c86daadb Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.286550 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88p4\" (UniqueName: \"kubernetes.io/projected/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-kube-api-access-v88p4\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.291360 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-combined-ca-bundle\") pod \"neutron-db-sync-6nfpn\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.301294 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.303015 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.314342 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.314674 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.315242 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.315532 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-8xfml" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.316107 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.355383 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4sts7" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.367921 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-db-sync-config-data\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.367988 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-scripts\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368021 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368097 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368144 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-logs\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368202 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368225 4816 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368258 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368279 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368307 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-combined-ca-bundle\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368360 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368414 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xzhw\" (UniqueName: \"kubernetes.io/projected/0731f364-690d-4bf9-a86f-cbaa984c62c8-kube-api-access-8xzhw\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368445 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt7lp\" (UniqueName: \"kubernetes.io/projected/8b407ed1-1b79-41c4-862f-589df8b95a09-kube-api-access-tt7lp\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368503 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcws7\" (UniqueName: \"kubernetes.io/projected/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-kube-api-access-zcws7\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368553 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-config-data\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.368598 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-dns-svc\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.369851 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-dns-svc\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.370773 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.371507 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.374296 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.376387 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.377051 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.377640 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-db-sync-config-data\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.381396 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.381694 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.386836 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-combined-ca-bundle\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.408312 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xzhw\" (UniqueName: \"kubernetes.io/projected/0731f364-690d-4bf9-a86f-cbaa984c62c8-kube-api-access-8xzhw\") pod \"barbican-db-sync-8mdth\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.415515 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt7lp\" (UniqueName: \"kubernetes.io/projected/8b407ed1-1b79-41c4-862f-589df8b95a09-kube-api-access-tt7lp\") pod \"dnsmasq-dns-56798b757f-q8g6p\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.423426 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.445046 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471563 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcws7\" (UniqueName: \"kubernetes.io/projected/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-kube-api-access-zcws7\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471628 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-config-data\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471697 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-scripts\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471716 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471751 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-logs\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471778 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471800 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.471826 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.473524 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.485561 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.487997 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-q8g6p"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.490108 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-logs\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.490245 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.503780 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-config-data\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.506278 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.508220 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-scripts\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.538179 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcws7\" (UniqueName: \"kubernetes.io/projected/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-kube-api-access-zcws7\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.599075 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.599468 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-7rql6"]
Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.600301 4816 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/barbican-db-sync-8mdth" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.607486 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-logs\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.607584 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.607620 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.607758 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.607883 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.608035 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.608068 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.608117 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg969\" (UniqueName: \"kubernetes.io/projected/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-kube-api-access-tg969\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.652409 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711471 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711559 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711637 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711713 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711793 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711833 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711868 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg969\" (UniqueName: \"kubernetes.io/projected/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-kube-api-access-tg969\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.711909 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-logs\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.712518 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-logs\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 
13:22:35.715028 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.715754 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.719614 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sqvh5"] Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.744233 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.745205 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.746242 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.748799 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.757794 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg969\" (UniqueName: \"kubernetes.io/projected/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-kube-api-access-tg969\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.777073 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:35 crc kubenswrapper[4816]: I0216 13:22:35.811897 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:22:35 crc kubenswrapper[4816]: W0216 13:22:35.939562 4816 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod860e76e2_3fc6_4b66_8bb2_2e377153c53b.slice/crio-b4cbf23e42f1c15a0d0273c776d4b9288a4fa85b86f700881a3b7530c658f545 WatchSource:0}: Error finding container b4cbf23e42f1c15a0d0273c776d4b9288a4fa85b86f700881a3b7530c658f545: Status 404 returned error can't find the container with id b4cbf23e42f1c15a0d0273c776d4b9288a4fa85b86f700881a3b7530c658f545 Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.034150 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.054907 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kkssq" event={"ID":"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e","Type":"ContainerStarted","Data":"df017dcc3cde0761325d4200625d8bc64adb320afb72fc3456b554d4c86daadb"} Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.099831 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sqvh5" event={"ID":"af6d642c-ddbc-4faa-8871-ad5556ff1a64","Type":"ContainerStarted","Data":"09afed434d231bdcb0c85436275e7373b88bca26134aa26d1a699b1f49d1709e"} Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.146584 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d5679f497-7rql6" event={"ID":"c978dd63-9f24-4c9b-9fce-2c807372eae6","Type":"ContainerStarted","Data":"e2018cb76e18cae059649017e8a29ab596b8e6b5bbb6bccb1f779f0c91daef00"} Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.335107 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f"} Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.335184 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837"} Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.341493 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerStarted","Data":"b4cbf23e42f1c15a0d0273c776d4b9288a4fa85b86f700881a3b7530c658f545"} Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.356926 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4sts7"] Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.391748 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6nfpn"] Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.627625 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-q8g6p"] Feb 16 13:22:36 crc kubenswrapper[4816]: W0216 13:22:36.681625 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b407ed1_1b79_41c4_862f_589df8b95a09.slice/crio-16e3c6105879ecfb3c166c103d37a5e341fb0e55c4bb7bae6ef2a19f72700e9e WatchSource:0}: Error finding container 16e3c6105879ecfb3c166c103d37a5e341fb0e55c4bb7bae6ef2a19f72700e9e: Status 404 returned error can't find the container with id 16e3c6105879ecfb3c166c103d37a5e341fb0e55c4bb7bae6ef2a19f72700e9e Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.728743 4816 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/barbican-db-sync-8mdth"] Feb 16 13:22:36 crc kubenswrapper[4816]: I0216 13:22:36.966021 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.142033 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.371751 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerStarted","Data":"b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.374227 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb3bc258-befb-4186-bc08-6c7e6ff9a49f","Type":"ContainerStarted","Data":"fdffb8ef70c99126f8da95a4bc55dd39471351d442e24719176216b458e588d0"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.391397 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kkssq" event={"ID":"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e","Type":"ContainerStarted","Data":"b657086e02b8ce8e458e27dbbbf41ae11481a85b47a0269bc3b1dae982a47a7e"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.425521 4816 generic.go:334] "Generic (PLEG): container finished" podID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerID="5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67" exitCode=0 Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.457266 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3ed4630-9bfe-4c17-bfab-eb915f1871b9","Type":"ContainerStarted","Data":"5c9b73495e99e76c24bfee7bd3274340b8c66c142a1aec9afc7bf03169342d7c"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.457327 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" event={"ID":"8b407ed1-1b79-41c4-862f-589df8b95a09","Type":"ContainerDied","Data":"5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.457345 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" event={"ID":"8b407ed1-1b79-41c4-862f-589df8b95a09","Type":"ContainerStarted","Data":"16e3c6105879ecfb3c166c103d37a5e341fb0e55c4bb7bae6ef2a19f72700e9e"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.461585 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=38.772869141 podStartE2EDuration="43.461558104s" podCreationTimestamp="2026-02-16 13:21:54 +0000 UTC" firstStartedPulling="2026-02-16 13:22:28.584513999 +0000 UTC m=+1147.911227737" lastFinishedPulling="2026-02-16 13:22:33.273202972 +0000 UTC m=+1152.599916700" observedRunningTime="2026-02-16 13:22:37.414294693 +0000 UTC m=+1156.741008421" watchObservedRunningTime="2026-02-16 13:22:37.461558104 +0000 UTC m=+1156.788271832" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.480357 4816 generic.go:334] "Generic (PLEG): container finished" podID="c978dd63-9f24-4c9b-9fce-2c807372eae6" containerID="a625426ff17be9082e08f118cf5e545d8fa4fbe177ae13078530e23f567a21ef" exitCode=0 Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.480424 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d5679f497-7rql6" 
event={"ID":"c978dd63-9f24-4c9b-9fce-2c807372eae6","Type":"ContainerDied","Data":"a625426ff17be9082e08f118cf5e545d8fa4fbe177ae13078530e23f567a21ef"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.526571 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4sts7" event={"ID":"58201649-b6df-4c32-a1c2-b672eefca745","Type":"ContainerStarted","Data":"f27ec706c1005cb99f36920350b302e1a020f80345d46432c243d0357fffe2b9"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.537253 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8mdth" event={"ID":"0731f364-690d-4bf9-a86f-cbaa984c62c8","Type":"ContainerStarted","Data":"9f46a0eb5b270e8aeb9d7cb3adc664a1bcc8867e0c01f4f916e37f8e31465c65"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.554105 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-kkssq" podStartSLOduration=3.554084554 podStartE2EDuration="3.554084554s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:37.457803462 +0000 UTC m=+1156.784517190" watchObservedRunningTime="2026-02-16 13:22:37.554084554 +0000 UTC m=+1156.880798272" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.598334 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.645776 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6nfpn" event={"ID":"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060","Type":"ContainerStarted","Data":"cf8f50b8e01719c98fb9295479623e2775e7238b5f15f7ebf0bc469d7d0b9a1b"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.645825 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6nfpn" event={"ID":"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060","Type":"ContainerStarted","Data":"0965e7d1adbd2e2c8219f553c3911c9af38b142aea1a19986fe339097192bd60"} Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.683300 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.723748 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.728899 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-6nfpn" podStartSLOduration=3.728872374 podStartE2EDuration="3.728872374s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:37.688584852 +0000 UTC m=+1157.015298580" watchObservedRunningTime="2026-02-16 13:22:37.728872374 +0000 UTC m=+1157.055586102" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.928959 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-q8g6p"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.976772 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-lgpxq"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.978279 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.985850 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.985913 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.985961 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.985980 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.986006 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdl4r\" (UniqueName: \"kubernetes.io/projected/d9735670-f4f7-4da6-8985-58eba2625c2c-kube-api-access-mdl4r\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.986038 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-config\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.986646 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-lgpxq"] Feb 16 13:22:37 crc kubenswrapper[4816]: I0216 13:22:37.988381 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.089752 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.089841 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: 
\"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.089912 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.089937 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.089974 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdl4r\" (UniqueName: \"kubernetes.io/projected/d9735670-f4f7-4da6-8985-58eba2625c2c-kube-api-access-mdl4r\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.090020 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-config\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.091568 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.091774 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-config\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.092166 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.092631 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.092701 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 
crc kubenswrapper[4816]: I0216 13:22:38.127350 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdl4r\" (UniqueName: \"kubernetes.io/projected/d9735670-f4f7-4da6-8985-58eba2625c2c-kube-api-access-mdl4r\") pod \"dnsmasq-dns-57c957c4ff-lgpxq\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.145972 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.161198 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:38 crc kubenswrapper[4816]: E0216 13:22:38.274189 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13dd3829_afd7_421b_8caf_6f789f71fc25.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.296108 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g585g\" (UniqueName: \"kubernetes.io/projected/c978dd63-9f24-4c9b-9fce-2c807372eae6-kube-api-access-g585g\") pod \"c978dd63-9f24-4c9b-9fce-2c807372eae6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.296178 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-dns-svc\") pod \"c978dd63-9f24-4c9b-9fce-2c807372eae6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.296371 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-nb\") pod \"c978dd63-9f24-4c9b-9fce-2c807372eae6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.296478 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-sb\") pod \"c978dd63-9f24-4c9b-9fce-2c807372eae6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.296594 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-config\") pod \"c978dd63-9f24-4c9b-9fce-2c807372eae6\" (UID: \"c978dd63-9f24-4c9b-9fce-2c807372eae6\") " Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.306209 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c978dd63-9f24-4c9b-9fce-2c807372eae6-kube-api-access-g585g" (OuterVolumeSpecName: "kube-api-access-g585g") pod "c978dd63-9f24-4c9b-9fce-2c807372eae6" (UID: "c978dd63-9f24-4c9b-9fce-2c807372eae6"). InnerVolumeSpecName "kube-api-access-g585g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.328320 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c978dd63-9f24-4c9b-9fce-2c807372eae6" (UID: "c978dd63-9f24-4c9b-9fce-2c807372eae6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.334218 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c978dd63-9f24-4c9b-9fce-2c807372eae6" (UID: "c978dd63-9f24-4c9b-9fce-2c807372eae6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.342426 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c978dd63-9f24-4c9b-9fce-2c807372eae6" (UID: "c978dd63-9f24-4c9b-9fce-2c807372eae6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.355673 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-config" (OuterVolumeSpecName: "config") pod "c978dd63-9f24-4c9b-9fce-2c807372eae6" (UID: "c978dd63-9f24-4c9b-9fce-2c807372eae6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.403925 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.403997 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.404032 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.404056 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g585g\" (UniqueName: \"kubernetes.io/projected/c978dd63-9f24-4c9b-9fce-2c807372eae6-kube-api-access-g585g\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.404106 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c978dd63-9f24-4c9b-9fce-2c807372eae6-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.689681 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb3bc258-befb-4186-bc08-6c7e6ff9a49f","Type":"ContainerStarted","Data":"f195b0d2d37f2e7ab787be8c081e9c312e85e2fe84874b438f9e329e4468eaf0"} Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.708967 4816 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" podUID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerName="dnsmasq-dns" containerID="cri-o://142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63" gracePeriod=10 Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.709087 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" event={"ID":"8b407ed1-1b79-41c4-862f-589df8b95a09","Type":"ContainerStarted","Data":"142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63"} Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.709555 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.729586 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d5679f497-7rql6" event={"ID":"c978dd63-9f24-4c9b-9fce-2c807372eae6","Type":"ContainerDied","Data":"e2018cb76e18cae059649017e8a29ab596b8e6b5bbb6bccb1f779f0c91daef00"} Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.729646 4816 scope.go:117] "RemoveContainer" containerID="a625426ff17be9082e08f118cf5e545d8fa4fbe177ae13078530e23f567a21ef" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.729949 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-7rql6" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.746008 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" podStartSLOduration=4.745905206 podStartE2EDuration="4.745905206s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:38.742678648 +0000 UTC m=+1158.069392376" watchObservedRunningTime="2026-02-16 13:22:38.745905206 +0000 UTC m=+1158.072618934" Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.801812 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-lgpxq"] Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.907123 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-7rql6"] Feb 16 13:22:38 crc kubenswrapper[4816]: I0216 13:22:38.938600 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-7rql6"] Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.276888 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.429441 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-dns-svc\") pod \"8b407ed1-1b79-41c4-862f-589df8b95a09\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.429542 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config\") pod \"8b407ed1-1b79-41c4-862f-589df8b95a09\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.429567 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-nb\") pod \"8b407ed1-1b79-41c4-862f-589df8b95a09\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.429617 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt7lp\" (UniqueName: \"kubernetes.io/projected/8b407ed1-1b79-41c4-862f-589df8b95a09-kube-api-access-tt7lp\") pod \"8b407ed1-1b79-41c4-862f-589df8b95a09\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.429689 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-sb\") pod \"8b407ed1-1b79-41c4-862f-589df8b95a09\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.438256 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c978dd63-9f24-4c9b-9fce-2c807372eae6" path="/var/lib/kubelet/pods/c978dd63-9f24-4c9b-9fce-2c807372eae6/volumes" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.476753 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b407ed1-1b79-41c4-862f-589df8b95a09-kube-api-access-tt7lp" (OuterVolumeSpecName: "kube-api-access-tt7lp") pod "8b407ed1-1b79-41c4-862f-589df8b95a09" (UID: "8b407ed1-1b79-41c4-862f-589df8b95a09"). InnerVolumeSpecName "kube-api-access-tt7lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.523648 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8b407ed1-1b79-41c4-862f-589df8b95a09" (UID: "8b407ed1-1b79-41c4-862f-589df8b95a09"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.532158 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config" (OuterVolumeSpecName: "config") pod "8b407ed1-1b79-41c4-862f-589df8b95a09" (UID: "8b407ed1-1b79-41c4-862f-589df8b95a09"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.532507 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config\") pod \"8b407ed1-1b79-41c4-862f-589df8b95a09\" (UID: \"8b407ed1-1b79-41c4-862f-589df8b95a09\") " Feb 16 13:22:39 crc kubenswrapper[4816]: W0216 13:22:39.532639 4816 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/8b407ed1-1b79-41c4-862f-589df8b95a09/volumes/kubernetes.io~configmap/config Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.532674 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config" (OuterVolumeSpecName: "config") pod "8b407ed1-1b79-41c4-862f-589df8b95a09" (UID: "8b407ed1-1b79-41c4-862f-589df8b95a09"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.532922 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.532946 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt7lp\" (UniqueName: \"kubernetes.io/projected/8b407ed1-1b79-41c4-862f-589df8b95a09-kube-api-access-tt7lp\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.532957 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.535752 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b407ed1-1b79-41c4-862f-589df8b95a09" (UID: "8b407ed1-1b79-41c4-862f-589df8b95a09"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.572395 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8b407ed1-1b79-41c4-862f-589df8b95a09" (UID: "8b407ed1-1b79-41c4-862f-589df8b95a09"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.635981 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.636018 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b407ed1-1b79-41c4-862f-589df8b95a09-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.772388 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3ed4630-9bfe-4c17-bfab-eb915f1871b9","Type":"ContainerStarted","Data":"f8ae73c126c363833f99df33e58ee290003dea459fae306bda2b1c0a48b29dd4"} Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.779067 4816 generic.go:334] "Generic (PLEG): container finished" podID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerID="142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63" exitCode=0 Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.779134 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" event={"ID":"8b407ed1-1b79-41c4-862f-589df8b95a09","Type":"ContainerDied","Data":"142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63"} Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.779165 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" event={"ID":"8b407ed1-1b79-41c4-862f-589df8b95a09","Type":"ContainerDied","Data":"16e3c6105879ecfb3c166c103d37a5e341fb0e55c4bb7bae6ef2a19f72700e9e"} Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.779185 4816 scope.go:117] "RemoveContainer" containerID="142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.779336 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-q8g6p" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.833531 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-q8g6p"] Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.837148 4816 generic.go:334] "Generic (PLEG): container finished" podID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerID="3156059089ec21e28a45ad80fc0007678973725b9d2a881f436cab57f3835c22" exitCode=0 Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.837225 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" event={"ID":"d9735670-f4f7-4da6-8985-58eba2625c2c","Type":"ContainerDied","Data":"3156059089ec21e28a45ad80fc0007678973725b9d2a881f436cab57f3835c22"} Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.837252 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" event={"ID":"d9735670-f4f7-4da6-8985-58eba2625c2c","Type":"ContainerStarted","Data":"4f7ae28b84513960e14b7d97ed9bfa82b7bee59a6d07a3226de2820d90dd5743"} Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.840359 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-q8g6p"] Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.852671 4816 scope.go:117] "RemoveContainer" containerID="5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67" Feb 16 13:22:39 crc kubenswrapper[4816]: I0216 13:22:39.997995 4816 scope.go:117] "RemoveContainer" containerID="142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63" Feb 16 13:22:40 crc kubenswrapper[4816]: E0216 13:22:40.003468 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63\": container with ID starting with 142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63 not found: ID does not exist" containerID="142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63" Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.003512 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63"} err="failed to get container status \"142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63\": rpc error: code = NotFound desc = could not find container \"142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63\": container with ID starting with 142dbda25b94f8752e7d5a0d406e5703cca1c1b0a0ff1bca0ccd9e8b4a63ef63 not found: ID does not exist" Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.003538 4816 scope.go:117] "RemoveContainer" containerID="5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67" Feb 16 13:22:40 crc kubenswrapper[4816]: E0216 13:22:40.004054 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67\": container with ID starting with 5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67 not found: ID does not exist" containerID="5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67" Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.004079 4816 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67"} err="failed to get container status \"5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67\": rpc error: code = NotFound desc = could not find container \"5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67\": container with ID starting with 5c523f266ca77a285df4ba8bb19254cdb577d5fdc636d83b197fa998f9d10d67 not found: ID does not exist" Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.849020 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3ed4630-9bfe-4c17-bfab-eb915f1871b9","Type":"ContainerStarted","Data":"0d91b1adca95b091698777b9b7071ab6794bc04d84d64b9e208e219713418072"} Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.849126 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-log" containerID="cri-o://f8ae73c126c363833f99df33e58ee290003dea459fae306bda2b1c0a48b29dd4" gracePeriod=30 Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.849206 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-httpd" containerID="cri-o://0d91b1adca95b091698777b9b7071ab6794bc04d84d64b9e208e219713418072" gracePeriod=30 Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.858516 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-log" containerID="cri-o://f195b0d2d37f2e7ab787be8c081e9c312e85e2fe84874b438f9e329e4468eaf0" gracePeriod=30 Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.858809 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb3bc258-befb-4186-bc08-6c7e6ff9a49f","Type":"ContainerStarted","Data":"8bc8fe49a5ee2cfa9fa422c7744b7fb3e34159f69a3b5e08628e248066222a49"} Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.858824 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-httpd" containerID="cri-o://8bc8fe49a5ee2cfa9fa422c7744b7fb3e34159f69a3b5e08628e248066222a49" gracePeriod=30 Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.870839 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" event={"ID":"d9735670-f4f7-4da6-8985-58eba2625c2c","Type":"ContainerStarted","Data":"e8d1274ef3521c1907ffc55521056ccbd0722d4f2e08b68bcfa5cbfba0063961"} Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.875495 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.886238 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.886214573 podStartE2EDuration="6.886214573s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:40.872325803 +0000 UTC m=+1160.199039541" watchObservedRunningTime="2026-02-16 
13:22:40.886214573 +0000 UTC m=+1160.212928301" Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.897779 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" podStartSLOduration=3.897758719 podStartE2EDuration="3.897758719s" podCreationTimestamp="2026-02-16 13:22:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:40.896850404 +0000 UTC m=+1160.223564132" watchObservedRunningTime="2026-02-16 13:22:40.897758719 +0000 UTC m=+1160.224472447" Feb 16 13:22:40 crc kubenswrapper[4816]: I0216 13:22:40.930432 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.930411682 podStartE2EDuration="6.930411682s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:22:40.924532841 +0000 UTC m=+1160.251246569" watchObservedRunningTime="2026-02-16 13:22:40.930411682 +0000 UTC m=+1160.257125410" Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.417919 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b407ed1-1b79-41c4-862f-589df8b95a09" path="/var/lib/kubelet/pods/8b407ed1-1b79-41c4-862f-589df8b95a09/volumes" Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.891024 4816 generic.go:334] "Generic (PLEG): container finished" podID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerID="8bc8fe49a5ee2cfa9fa422c7744b7fb3e34159f69a3b5e08628e248066222a49" exitCode=0 Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.891090 4816 generic.go:334] "Generic (PLEG): container finished" podID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerID="f195b0d2d37f2e7ab787be8c081e9c312e85e2fe84874b438f9e329e4468eaf0" exitCode=143 Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.891144 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb3bc258-befb-4186-bc08-6c7e6ff9a49f","Type":"ContainerDied","Data":"8bc8fe49a5ee2cfa9fa422c7744b7fb3e34159f69a3b5e08628e248066222a49"} Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.891195 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb3bc258-befb-4186-bc08-6c7e6ff9a49f","Type":"ContainerDied","Data":"f195b0d2d37f2e7ab787be8c081e9c312e85e2fe84874b438f9e329e4468eaf0"} Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.893081 4816 generic.go:334] "Generic (PLEG): container finished" podID="a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" containerID="b657086e02b8ce8e458e27dbbbf41ae11481a85b47a0269bc3b1dae982a47a7e" exitCode=0 Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.893119 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kkssq" event={"ID":"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e","Type":"ContainerDied","Data":"b657086e02b8ce8e458e27dbbbf41ae11481a85b47a0269bc3b1dae982a47a7e"} Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.896632 4816 generic.go:334] "Generic (PLEG): container finished" podID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerID="0d91b1adca95b091698777b9b7071ab6794bc04d84d64b9e208e219713418072" exitCode=0 Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.896698 4816 generic.go:334] "Generic (PLEG): container finished" 
podID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerID="f8ae73c126c363833f99df33e58ee290003dea459fae306bda2b1c0a48b29dd4" exitCode=143 Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.896710 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3ed4630-9bfe-4c17-bfab-eb915f1871b9","Type":"ContainerDied","Data":"0d91b1adca95b091698777b9b7071ab6794bc04d84d64b9e208e219713418072"} Feb 16 13:22:41 crc kubenswrapper[4816]: I0216 13:22:41.896759 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3ed4630-9bfe-4c17-bfab-eb915f1871b9","Type":"ContainerDied","Data":"f8ae73c126c363833f99df33e58ee290003dea459fae306bda2b1c0a48b29dd4"} Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.644774 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.658345 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-scripts\") pod \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.658457 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-credential-keys\") pod \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.658569 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-fernet-keys\") pod \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.658599 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbg56\" (UniqueName: \"kubernetes.io/projected/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-kube-api-access-dbg56\") pod \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.658737 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-config-data\") pod \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.658867 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-combined-ca-bundle\") pod \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\" (UID: \"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e\") " Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.669232 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-scripts" (OuterVolumeSpecName: "scripts") pod "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" (UID: "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.681820 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" (UID: "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.683012 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" (UID: "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.692441 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-kube-api-access-dbg56" (OuterVolumeSpecName: "kube-api-access-dbg56") pod "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" (UID: "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e"). InnerVolumeSpecName "kube-api-access-dbg56". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.716731 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" (UID: "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.722542 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-config-data" (OuterVolumeSpecName: "config-data") pod "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" (UID: "a89df6fa-b1ec-43f3-9a69-5ec89ecde79e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.761040 4816 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.761077 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbg56\" (UniqueName: \"kubernetes.io/projected/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-kube-api-access-dbg56\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.761091 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.761101 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.761113 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.761124 4816 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.961897 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kkssq" event={"ID":"a89df6fa-b1ec-43f3-9a69-5ec89ecde79e","Type":"ContainerDied","Data":"df017dcc3cde0761325d4200625d8bc64adb320afb72fc3456b554d4c86daadb"} Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.961936 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df017dcc3cde0761325d4200625d8bc64adb320afb72fc3456b554d4c86daadb" Feb 16 13:22:46 crc kubenswrapper[4816]: I0216 13:22:46.961946 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kkssq" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.828906 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-kkssq"] Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.834976 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-kkssq"] Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.942698 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-ndrvk"] Feb 16 13:22:47 crc kubenswrapper[4816]: E0216 13:22:47.943094 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c978dd63-9f24-4c9b-9fce-2c807372eae6" containerName="init" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.943110 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c978dd63-9f24-4c9b-9fce-2c807372eae6" containerName="init" Feb 16 13:22:47 crc kubenswrapper[4816]: E0216 13:22:47.943121 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerName="init" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.943128 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerName="init" Feb 16 13:22:47 crc kubenswrapper[4816]: E0216 13:22:47.943140 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" containerName="keystone-bootstrap" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.943147 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" containerName="keystone-bootstrap" Feb 16 13:22:47 crc kubenswrapper[4816]: E0216 13:22:47.943158 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerName="dnsmasq-dns" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.943164 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerName="dnsmasq-dns" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.943316 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" containerName="keystone-bootstrap" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.943327 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c978dd63-9f24-4c9b-9fce-2c807372eae6" containerName="init" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.943338 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b407ed1-1b79-41c4-862f-589df8b95a09" containerName="dnsmasq-dns" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.944097 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.947064 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.947267 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.947397 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zz64" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.947666 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.947855 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.951868 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ndrvk"] Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.980398 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2qkp\" (UniqueName: \"kubernetes.io/projected/f206b784-e69e-420e-a975-95d7e72f7a30-kube-api-access-h2qkp\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.980480 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-fernet-keys\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.980542 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-scripts\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.980566 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-credential-keys\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.980727 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-combined-ca-bundle\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:47 crc kubenswrapper[4816]: I0216 13:22:47.980918 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-config-data\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.082920 4816 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-fernet-keys\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.082971 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-scripts\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.083005 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-credential-keys\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.083051 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-combined-ca-bundle\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.083165 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-config-data\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.083207 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2qkp\" (UniqueName: \"kubernetes.io/projected/f206b784-e69e-420e-a975-95d7e72f7a30-kube-api-access-h2qkp\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.089020 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-scripts\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.089459 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-config-data\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.090103 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-combined-ca-bundle\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.090198 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-fernet-keys\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") 
" pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.094071 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-credential-keys\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.098491 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2qkp\" (UniqueName: \"kubernetes.io/projected/f206b784-e69e-420e-a975-95d7e72f7a30-kube-api-access-h2qkp\") pod \"keystone-bootstrap-ndrvk\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.147848 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.215236 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2qvgd"] Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.215506 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="dnsmasq-dns" containerID="cri-o://fdbc95b7458ad30ac590cacf9be1c0879275975b764f1daeae329a1db2e0aa82" gracePeriod=10 Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.265247 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:22:48 crc kubenswrapper[4816]: E0216 13:22:48.527303 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13dd3829_afd7_421b_8caf_6f789f71fc25.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.573744 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.981080 4816 generic.go:334] "Generic (PLEG): container finished" podID="90566558-9f0d-4487-a86c-4ef20464421b" containerID="fdbc95b7458ad30ac590cacf9be1c0879275975b764f1daeae329a1db2e0aa82" exitCode=0 Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.981169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" event={"ID":"90566558-9f0d-4487-a86c-4ef20464421b","Type":"ContainerDied","Data":"fdbc95b7458ad30ac590cacf9be1c0879275975b764f1daeae329a1db2e0aa82"} Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.983682 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3ed4630-9bfe-4c17-bfab-eb915f1871b9","Type":"ContainerDied","Data":"5c9b73495e99e76c24bfee7bd3274340b8c66c142a1aec9afc7bf03169342d7c"} Feb 16 13:22:48 crc kubenswrapper[4816]: I0216 13:22:48.983713 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c9b73495e99e76c24bfee7bd3274340b8c66c142a1aec9afc7bf03169342d7c" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.071713 4816 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.110925 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-scripts\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.110997 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-httpd-run\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.111099 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg969\" (UniqueName: \"kubernetes.io/projected/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-kube-api-access-tg969\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.111144 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.111183 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-internal-tls-certs\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.111217 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-config-data\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.111280 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-logs\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.111297 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-combined-ca-bundle\") pod \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\" (UID: \"c3ed4630-9bfe-4c17-bfab-eb915f1871b9\") " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.116234 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "httpd-run". 
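Once the kubelet kills dnsmasq-dns-74dc88fc-2qvgd's container with gracePeriod=10, nothing is listening on 10.217.0.125:5353 any more, so the readiness prober reports "connect: connection refused" until the pod is finally removed (the same failure repeats at 13:22:53 below). A kubelet TCP readiness probe amounts to a dial with a deadline; a minimal sketch, with the address copied from the failing probe and the one-second timeout an assumption rather than the pod's configured value:

// tcpprobe.go: what a TCP readiness probe boils down to, a dial with a
// deadline. Address copied from the failing probe above; the 1s timeout
// is an assumption, not the pod's configured value.
package main

import (
	"fmt"
	"net"
	"time"
)

func tcpReady(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		// e.g. "dial tcp 10.217.0.125:5353: connect: connection refused"
		return err
	}
	return conn.Close()
}

func main() {
	if err := tcpReady("10.217.0.125:5353", time.Second); err != nil {
		fmt.Println("probe failed:", err)
	} else {
		fmt.Println("probe succeeded")
	}
}

Against a live endpoint the dial returns nil as soon as something accepts on the port, which is when the prober flips the container back to ready.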
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.120708 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-logs" (OuterVolumeSpecName: "logs") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.122386 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.124588 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-kube-api-access-tg969" (OuterVolumeSpecName: "kube-api-access-tg969") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "kube-api-access-tg969". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.126936 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-scripts" (OuterVolumeSpecName: "scripts") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.170906 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.183484 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.213956 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tg969\" (UniqueName: \"kubernetes.io/projected/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-kube-api-access-tg969\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.213999 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.214010 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.214019 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.214030 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.214038 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.214046 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.215859 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-config-data" (OuterVolumeSpecName: "config-data") pod "c3ed4630-9bfe-4c17-bfab-eb915f1871b9" (UID: "c3ed4630-9bfe-4c17-bfab-eb915f1871b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.237849 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.316355 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.316822 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3ed4630-9bfe-4c17-bfab-eb915f1871b9-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.428302 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a89df6fa-b1ec-43f3-9a69-5ec89ecde79e" path="/var/lib/kubelet/pods/a89df6fa-b1ec-43f3-9a69-5ec89ecde79e/volumes" Feb 16 13:22:49 crc kubenswrapper[4816]: I0216 13:22:49.991213 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.035421 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.035496 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.088915 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:50 crc kubenswrapper[4816]: E0216 13:22:50.089306 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-log" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.089320 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-log" Feb 16 13:22:50 crc kubenswrapper[4816]: E0216 13:22:50.089345 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-httpd" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.089351 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-httpd" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.089531 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-httpd" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.089549 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" containerName="glance-log" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.090486 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.092830 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.099991 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.102983 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.170468 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.170707 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.170845 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.170926 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.170966 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-logs\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.170987 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.171097 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgjs8\" (UniqueName: \"kubernetes.io/projected/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-kube-api-access-fgjs8\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.171124 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.272738 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.272856 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.272887 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.272924 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.272954 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-logs\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.272975 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.273013 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgjs8\" (UniqueName: \"kubernetes.io/projected/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-kube-api-access-fgjs8\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.273044 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.273385 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.273573 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-logs\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.273587 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.278364 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.278776 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.279398 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.295581 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.296506 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgjs8\" (UniqueName: \"kubernetes.io/projected/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-kube-api-access-fgjs8\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.302780 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:22:50 crc kubenswrapper[4816]: I0216 13:22:50.423345 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:22:51 crc kubenswrapper[4816]: I0216 13:22:51.413951 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3ed4630-9bfe-4c17-bfab-eb915f1871b9" path="/var/lib/kubelet/pods/c3ed4630-9bfe-4c17-bfab-eb915f1871b9/volumes" Feb 16 13:22:53 crc kubenswrapper[4816]: I0216 13:22:53.573760 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Feb 16 13:22:56 crc kubenswrapper[4816]: I0216 13:22:56.954498 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.020518 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-public-tls-certs\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.020882 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-scripts\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.020909 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-logs\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.020947 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-httpd-run\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.021018 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcws7\" (UniqueName: \"kubernetes.io/projected/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-kube-api-access-zcws7\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.021044 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.021138 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-combined-ca-bundle\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.021175 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-config-data\") pod \"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\" (UID: 
\"cb3bc258-befb-4186-bc08-6c7e6ff9a49f\") " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.024119 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.030214 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-logs" (OuterVolumeSpecName: "logs") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.075868 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-kube-api-access-zcws7" (OuterVolumeSpecName: "kube-api-access-zcws7") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "kube-api-access-zcws7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.075892 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.075975 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-scripts" (OuterVolumeSpecName: "scripts") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.082238 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.128938 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.128965 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.128977 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.128991 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcws7\" (UniqueName: \"kubernetes.io/projected/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-kube-api-access-zcws7\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.129050 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.129063 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.131304 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb3bc258-befb-4186-bc08-6c7e6ff9a49f","Type":"ContainerDied","Data":"fdffb8ef70c99126f8da95a4bc55dd39471351d442e24719176216b458e588d0"} Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.131395 4816 scope.go:117] "RemoveContainer" containerID="8bc8fe49a5ee2cfa9fa422c7744b7fb3e34159f69a3b5e08628e248066222a49" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.131610 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.138179 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.163063 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.210219 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-config-data" (OuterVolumeSpecName: "config-data") pod "cb3bc258-befb-4186-bc08-6c7e6ff9a49f" (UID: "cb3bc258-befb-4186-bc08-6c7e6ff9a49f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.230547 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.230594 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.230612 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3bc258-befb-4186-bc08-6c7e6ff9a49f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.473849 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.489380 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.499697 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:57 crc kubenswrapper[4816]: E0216 13:22:57.500049 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-httpd" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.500067 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-httpd" Feb 16 13:22:57 crc kubenswrapper[4816]: E0216 13:22:57.500099 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-log" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.500105 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-log" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.500252 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-log" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.500277 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" containerName="glance-httpd" Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.501477 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.509301 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.509549 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.521599 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.638408 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.638465 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l5gk\" (UniqueName: \"kubernetes.io/projected/3219985d-af01-44c0-9945-075a41b0326c-kube-api-access-8l5gk\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.638509 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.638701 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-logs\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.638769 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.638855 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-config-data\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.638931 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.639960 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-scripts\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.741745 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.741825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-scripts\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.741875 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.741909 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l5gk\" (UniqueName: \"kubernetes.io/projected/3219985d-af01-44c0-9945-075a41b0326c-kube-api-access-8l5gk\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.741954 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.742012 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-logs\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.742045 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.742089 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-config-data\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.742398 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.742946 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.743631 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-logs\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.746725 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-scripts\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.747173 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.748169 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.750201 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-config-data\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.761031 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l5gk\" (UniqueName: \"kubernetes.io/projected/3219985d-af01-44c0-9945-075a41b0326c-kube-api-access-8l5gk\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Feb 16 13:22:57 crc kubenswrapper[4816]: I0216 13:22:57.770002 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " pod="openstack/glance-default-external-api-0"
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.187324 4816 scope.go:117] "RemoveContainer" containerID="f195b0d2d37f2e7ab787be8c081e9c312e85e2fe84874b438f9e329e4468eaf0" Feb 16 13:22:58 crc kubenswrapper[4816]: E0216 13:22:58.213968 4816 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Feb 16 13:22:58 crc kubenswrapper[4816]: E0216 13:22:58.214406 4816 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rgcb2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-sqvh5_openstack(af6d642c-ddbc-4faa-8871-ad5556ff1a64): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 16 13:22:58 crc kubenswrapper[4816]: E0216 13:22:58.215713 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-sqvh5" 
podUID="af6d642c-ddbc-4faa-8871-ad5556ff1a64" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.397359 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.453432 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxl4l\" (UniqueName: \"kubernetes.io/projected/90566558-9f0d-4487-a86c-4ef20464421b-kube-api-access-hxl4l\") pod \"90566558-9f0d-4487-a86c-4ef20464421b\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.453508 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-sb\") pod \"90566558-9f0d-4487-a86c-4ef20464421b\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.453575 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-dns-svc\") pod \"90566558-9f0d-4487-a86c-4ef20464421b\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.453772 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-nb\") pod \"90566558-9f0d-4487-a86c-4ef20464421b\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.453841 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-config\") pod \"90566558-9f0d-4487-a86c-4ef20464421b\" (UID: \"90566558-9f0d-4487-a86c-4ef20464421b\") " Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.470290 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90566558-9f0d-4487-a86c-4ef20464421b-kube-api-access-hxl4l" (OuterVolumeSpecName: "kube-api-access-hxl4l") pod "90566558-9f0d-4487-a86c-4ef20464421b" (UID: "90566558-9f0d-4487-a86c-4ef20464421b"). InnerVolumeSpecName "kube-api-access-hxl4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.507516 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "90566558-9f0d-4487-a86c-4ef20464421b" (UID: "90566558-9f0d-4487-a86c-4ef20464421b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.507539 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-config" (OuterVolumeSpecName: "config") pod "90566558-9f0d-4487-a86c-4ef20464421b" (UID: "90566558-9f0d-4487-a86c-4ef20464421b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.512545 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "90566558-9f0d-4487-a86c-4ef20464421b" (UID: "90566558-9f0d-4487-a86c-4ef20464421b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.550598 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "90566558-9f0d-4487-a86c-4ef20464421b" (UID: "90566558-9f0d-4487-a86c-4ef20464421b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.560939 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.560969 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.560984 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxl4l\" (UniqueName: \"kubernetes.io/projected/90566558-9f0d-4487-a86c-4ef20464421b-kube-api-access-hxl4l\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.560996 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.561007 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90566558-9f0d-4487-a86c-4ef20464421b-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:22:58 crc kubenswrapper[4816]: E0216 13:22:58.779172 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13dd3829_afd7_421b_8caf_6f789f71fc25.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.878145 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.886531 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ndrvk"] Feb 16 13:22:58 crc kubenswrapper[4816]: W0216 13:22:58.900826 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf206b784_e69e_420e_a975_95d7e72f7a30.slice/crio-ee45cb5c4cc83b68908708b29deca9433c209dce329f81a2b7a3540c12cd48dd WatchSource:0}: Error finding container ee45cb5c4cc83b68908708b29deca9433c209dce329f81a2b7a3540c12cd48dd: Status 404 returned error can't find the container with id ee45cb5c4cc83b68908708b29deca9433c209dce329f81a2b7a3540c12cd48dd Feb 16 13:22:58 crc kubenswrapper[4816]: W0216 13:22:58.912595 4816 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fe0907e_dfe1_4446_bb24_9d2630b7f33b.slice/crio-1bbc0ee5c1cc4dbc9efffd30c3c7266ed044d836c864cfe6f934cff698646a82 WatchSource:0}: Error finding container 1bbc0ee5c1cc4dbc9efffd30c3c7266ed044d836c864cfe6f934cff698646a82: Status 404 returned error can't find the container with id 1bbc0ee5c1cc4dbc9efffd30c3c7266ed044d836c864cfe6f934cff698646a82 Feb 16 13:22:58 crc kubenswrapper[4816]: I0216 13:22:58.976452 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:22:58 crc kubenswrapper[4816]: W0216 13:22:58.978344 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3219985d_af01_44c0_9945_075a41b0326c.slice/crio-d0e7f94ffa7c61b10eb872f9507bf0baea23a602295a7df6bbc77f84875876a8 WatchSource:0}: Error finding container d0e7f94ffa7c61b10eb872f9507bf0baea23a602295a7df6bbc77f84875876a8: Status 404 returned error can't find the container with id d0e7f94ffa7c61b10eb872f9507bf0baea23a602295a7df6bbc77f84875876a8 Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.150048 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4sts7" event={"ID":"58201649-b6df-4c32-a1c2-b672eefca745","Type":"ContainerStarted","Data":"378117e2c3b9f9411f85b556eb61749b15f23c041753736e75e440bb7444ab8e"} Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.153143 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" event={"ID":"90566558-9f0d-4487-a86c-4ef20464421b","Type":"ContainerDied","Data":"97b4d4304d126047f7d59373f0ca7fbbad3822c3cf9207820be2216bdfddfdee"} Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.153189 4816 scope.go:117] "RemoveContainer" containerID="fdbc95b7458ad30ac590cacf9be1c0879275975b764f1daeae329a1db2e0aa82" Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.153276 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2qvgd" Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.162697 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8mdth" event={"ID":"0731f364-690d-4bf9-a86f-cbaa984c62c8","Type":"ContainerStarted","Data":"6521ecb5649f87cfc503189d690f32f55c580c29870839eb0b39951412f666a0"} Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.167766 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-4sts7" podStartSLOduration=3.409347523 podStartE2EDuration="25.167746421s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="2026-02-16 13:22:36.432537775 +0000 UTC m=+1155.759251503" lastFinishedPulling="2026-02-16 13:22:58.190936673 +0000 UTC m=+1177.517650401" observedRunningTime="2026-02-16 13:22:59.167226767 +0000 UTC m=+1178.493940495" watchObservedRunningTime="2026-02-16 13:22:59.167746421 +0000 UTC m=+1178.494460149" Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.169688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ndrvk" event={"ID":"f206b784-e69e-420e-a975-95d7e72f7a30","Type":"ContainerStarted","Data":"ee45cb5c4cc83b68908708b29deca9433c209dce329f81a2b7a3540c12cd48dd"} Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.172330 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3219985d-af01-44c0-9945-075a41b0326c","Type":"ContainerStarted","Data":"d0e7f94ffa7c61b10eb872f9507bf0baea23a602295a7df6bbc77f84875876a8"} Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.186908 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerStarted","Data":"6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d"} Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.195699 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-8mdth" podStartSLOduration=2.72889908 podStartE2EDuration="24.195673493s" podCreationTimestamp="2026-02-16 13:22:35 +0000 UTC" firstStartedPulling="2026-02-16 13:22:36.740308122 +0000 UTC m=+1156.067021850" lastFinishedPulling="2026-02-16 13:22:58.207082535 +0000 UTC m=+1177.533796263" observedRunningTime="2026-02-16 13:22:59.190597475 +0000 UTC m=+1178.517311203" watchObservedRunningTime="2026-02-16 13:22:59.195673493 +0000 UTC m=+1178.522387221" Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.198208 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5fe0907e-dfe1-4446-bb24-9d2630b7f33b","Type":"ContainerStarted","Data":"1bbc0ee5c1cc4dbc9efffd30c3c7266ed044d836c864cfe6f934cff698646a82"} Feb 16 13:22:59 crc kubenswrapper[4816]: E0216 13:22:59.225030 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-sqvh5" podUID="af6d642c-ddbc-4faa-8871-ad5556ff1a64" Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.225357 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2qvgd"] Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.244161 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/dnsmasq-dns-74dc88fc-2qvgd"] Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.255883 4816 scope.go:117] "RemoveContainer" containerID="20780713f4d7a496883270efe7d7238976e8bc327c9fc83b7c99593d4d010628" Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.411609 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90566558-9f0d-4487-a86c-4ef20464421b" path="/var/lib/kubelet/pods/90566558-9f0d-4487-a86c-4ef20464421b/volumes" Feb 16 13:22:59 crc kubenswrapper[4816]: I0216 13:22:59.412509 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb3bc258-befb-4186-bc08-6c7e6ff9a49f" path="/var/lib/kubelet/pods/cb3bc258-befb-4186-bc08-6c7e6ff9a49f/volumes" Feb 16 13:23:00 crc kubenswrapper[4816]: I0216 13:23:00.208604 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ndrvk" event={"ID":"f206b784-e69e-420e-a975-95d7e72f7a30","Type":"ContainerStarted","Data":"ba7efc3ec9c5dfa321c00c780802598dd93174bcbdddbffdf6d571339bef8440"} Feb 16 13:23:00 crc kubenswrapper[4816]: I0216 13:23:00.211038 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3219985d-af01-44c0-9945-075a41b0326c","Type":"ContainerStarted","Data":"1e09a1f04835f07cfd1b685d682d5e82695b2570c518ab84fd219b72b21c6832"} Feb 16 13:23:00 crc kubenswrapper[4816]: I0216 13:23:00.213746 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerStarted","Data":"38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca"} Feb 16 13:23:00 crc kubenswrapper[4816]: I0216 13:23:00.215815 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5fe0907e-dfe1-4446-bb24-9d2630b7f33b","Type":"ContainerStarted","Data":"62057e2e47d2b36f712d5caf7f83d928fc945948d0d2f16f1eefee4349c17fca"} Feb 16 13:23:00 crc kubenswrapper[4816]: I0216 13:23:00.233253 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-ndrvk" podStartSLOduration=13.233236139 podStartE2EDuration="13.233236139s" podCreationTimestamp="2026-02-16 13:22:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:00.229930009 +0000 UTC m=+1179.556643757" watchObservedRunningTime="2026-02-16 13:23:00.233236139 +0000 UTC m=+1179.559949857" Feb 16 13:23:01 crc kubenswrapper[4816]: I0216 13:23:01.241253 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3219985d-af01-44c0-9945-075a41b0326c","Type":"ContainerStarted","Data":"4cd7538bae0d430d31985d2458850d36a20537584597e3cc4ca05965d1aef164"} Feb 16 13:23:01 crc kubenswrapper[4816]: I0216 13:23:01.246274 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5fe0907e-dfe1-4446-bb24-9d2630b7f33b","Type":"ContainerStarted","Data":"dd1376ff3c8e54a0019ab33b65e5df0ac628bae3d89bd925953baa92163dded7"} Feb 16 13:23:01 crc kubenswrapper[4816]: I0216 13:23:01.311227 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.31120699 podStartE2EDuration="4.31120699s" podCreationTimestamp="2026-02-16 13:22:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:01.259531748 +0000 UTC m=+1180.586245496" watchObservedRunningTime="2026-02-16 13:23:01.31120699 +0000 UTC m=+1180.637920718" Feb 16 13:23:01 crc kubenswrapper[4816]: I0216 13:23:01.325330 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.325260043 podStartE2EDuration="11.325260043s" podCreationTimestamp="2026-02-16 13:22:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:01.323497215 +0000 UTC m=+1180.650210943" watchObservedRunningTime="2026-02-16 13:23:01.325260043 +0000 UTC m=+1180.651973771" Feb 16 13:23:02 crc kubenswrapper[4816]: I0216 13:23:02.265044 4816 generic.go:334] "Generic (PLEG): container finished" podID="b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" containerID="cf8f50b8e01719c98fb9295479623e2775e7238b5f15f7ebf0bc469d7d0b9a1b" exitCode=0 Feb 16 13:23:02 crc kubenswrapper[4816]: I0216 13:23:02.265418 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6nfpn" event={"ID":"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060","Type":"ContainerDied","Data":"cf8f50b8e01719c98fb9295479623e2775e7238b5f15f7ebf0bc469d7d0b9a1b"} Feb 16 13:23:03 crc kubenswrapper[4816]: I0216 13:23:03.275103 4816 generic.go:334] "Generic (PLEG): container finished" podID="f206b784-e69e-420e-a975-95d7e72f7a30" containerID="ba7efc3ec9c5dfa321c00c780802598dd93174bcbdddbffdf6d571339bef8440" exitCode=0 Feb 16 13:23:03 crc kubenswrapper[4816]: I0216 13:23:03.275192 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ndrvk" event={"ID":"f206b784-e69e-420e-a975-95d7e72f7a30","Type":"ContainerDied","Data":"ba7efc3ec9c5dfa321c00c780802598dd93174bcbdddbffdf6d571339bef8440"} Feb 16 13:23:03 crc kubenswrapper[4816]: I0216 13:23:03.278466 4816 generic.go:334] "Generic (PLEG): container finished" podID="58201649-b6df-4c32-a1c2-b672eefca745" containerID="378117e2c3b9f9411f85b556eb61749b15f23c041753736e75e440bb7444ab8e" exitCode=0 Feb 16 13:23:03 crc kubenswrapper[4816]: I0216 13:23:03.278559 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4sts7" event={"ID":"58201649-b6df-4c32-a1c2-b672eefca745","Type":"ContainerDied","Data":"378117e2c3b9f9411f85b556eb61749b15f23c041753736e75e440bb7444ab8e"} Feb 16 13:23:03 crc kubenswrapper[4816]: I0216 13:23:03.280425 4816 generic.go:334] "Generic (PLEG): container finished" podID="0731f364-690d-4bf9-a86f-cbaa984c62c8" containerID="6521ecb5649f87cfc503189d690f32f55c580c29870839eb0b39951412f666a0" exitCode=0 Feb 16 13:23:03 crc kubenswrapper[4816]: I0216 13:23:03.280612 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8mdth" event={"ID":"0731f364-690d-4bf9-a86f-cbaa984c62c8","Type":"ContainerDied","Data":"6521ecb5649f87cfc503189d690f32f55c580c29870839eb0b39951412f666a0"} Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.121619 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.286736 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-combined-ca-bundle\") pod \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.286790 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-config\") pod \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.286918 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v88p4\" (UniqueName: \"kubernetes.io/projected/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-kube-api-access-v88p4\") pod \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\" (UID: \"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060\") " Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.290828 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerStarted","Data":"adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00"} Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.292472 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6nfpn" event={"ID":"b0d3c9a3-acab-4211-8a7f-90f5a6fe0060","Type":"ContainerDied","Data":"0965e7d1adbd2e2c8219f553c3911c9af38b142aea1a19986fe339097192bd60"} Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.292503 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0965e7d1adbd2e2c8219f553c3911c9af38b142aea1a19986fe339097192bd60" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.292557 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6nfpn" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.294593 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-kube-api-access-v88p4" (OuterVolumeSpecName: "kube-api-access-v88p4") pod "b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" (UID: "b0d3c9a3-acab-4211-8a7f-90f5a6fe0060"). InnerVolumeSpecName "kube-api-access-v88p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.320569 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" (UID: "b0d3c9a3-acab-4211-8a7f-90f5a6fe0060"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.326803 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-config" (OuterVolumeSpecName: "config") pod "b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" (UID: "b0d3c9a3-acab-4211-8a7f-90f5a6fe0060"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.388833 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v88p4\" (UniqueName: \"kubernetes.io/projected/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-kube-api-access-v88p4\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.388884 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.388893 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.626244 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9wwj2"] Feb 16 13:23:04 crc kubenswrapper[4816]: E0216 13:23:04.627304 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="dnsmasq-dns" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.627407 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="dnsmasq-dns" Feb 16 13:23:04 crc kubenswrapper[4816]: E0216 13:23:04.627472 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="init" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.627527 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="init" Feb 16 13:23:04 crc kubenswrapper[4816]: E0216 13:23:04.627578 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" containerName="neutron-db-sync" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.627624 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" containerName="neutron-db-sync" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.627906 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" containerName="neutron-db-sync" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.627965 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="90566558-9f0d-4487-a86c-4ef20464421b" containerName="dnsmasq-dns" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.628930 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.681375 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9wwj2"] Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.693152 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5lx7\" (UniqueName: \"kubernetes.io/projected/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-kube-api-access-g5lx7\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.693204 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-config\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.693298 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.693319 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.693370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.693399 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.738067 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-bf8ff5468-dcb5b"] Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.739412 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.743125 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.743795 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.744190 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2t2s8" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.744236 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.782721 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-bf8ff5468-dcb5b"] Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.797536 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.797597 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.797678 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.797719 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.797751 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5lx7\" (UniqueName: \"kubernetes.io/projected/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-kube-api-access-g5lx7\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.797778 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-config\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.798803 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: 
\"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.798871 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.798994 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-config\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.799419 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.800514 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.819694 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5lx7\" (UniqueName: \"kubernetes.io/projected/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-kube-api-access-g5lx7\") pod \"dnsmasq-dns-5ccc5c4795-9wwj2\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.888825 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.902993 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-config\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.903056 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qc9j\" (UniqueName: \"kubernetes.io/projected/91213a5b-68c8-4220-81db-f6b5f3ff324e-kube-api-access-6qc9j\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.903088 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-httpd-config\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.903184 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-ovndb-tls-certs\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.903233 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-combined-ca-bundle\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.935477 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8mdth" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.962490 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4sts7" Feb 16 13:23:04 crc kubenswrapper[4816]: I0216 13:23:04.968932 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.003955 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xzhw\" (UniqueName: \"kubernetes.io/projected/0731f364-690d-4bf9-a86f-cbaa984c62c8-kube-api-access-8xzhw\") pod \"0731f364-690d-4bf9-a86f-cbaa984c62c8\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004006 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-combined-ca-bundle\") pod \"f206b784-e69e-420e-a975-95d7e72f7a30\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004088 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-scripts\") pod \"f206b784-e69e-420e-a975-95d7e72f7a30\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004116 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-fernet-keys\") pod \"f206b784-e69e-420e-a975-95d7e72f7a30\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004179 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-credential-keys\") pod \"f206b784-e69e-420e-a975-95d7e72f7a30\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004209 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2qkp\" (UniqueName: \"kubernetes.io/projected/f206b784-e69e-420e-a975-95d7e72f7a30-kube-api-access-h2qkp\") pod \"f206b784-e69e-420e-a975-95d7e72f7a30\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004255 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-combined-ca-bundle\") pod \"0731f364-690d-4bf9-a86f-cbaa984c62c8\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004315 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-db-sync-config-data\") pod \"0731f364-690d-4bf9-a86f-cbaa984c62c8\" (UID: \"0731f364-690d-4bf9-a86f-cbaa984c62c8\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.004382 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-config-data\") pod \"f206b784-e69e-420e-a975-95d7e72f7a30\" (UID: \"f206b784-e69e-420e-a975-95d7e72f7a30\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.005033 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qc9j\" (UniqueName: \"kubernetes.io/projected/91213a5b-68c8-4220-81db-f6b5f3ff324e-kube-api-access-6qc9j\") pod 
\"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.005066 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-httpd-config\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.005125 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-ovndb-tls-certs\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.005177 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-combined-ca-bundle\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.005231 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-config\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.011739 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0731f364-690d-4bf9-a86f-cbaa984c62c8-kube-api-access-8xzhw" (OuterVolumeSpecName: "kube-api-access-8xzhw") pod "0731f364-690d-4bf9-a86f-cbaa984c62c8" (UID: "0731f364-690d-4bf9-a86f-cbaa984c62c8"). InnerVolumeSpecName "kube-api-access-8xzhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.013476 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f206b784-e69e-420e-a975-95d7e72f7a30" (UID: "f206b784-e69e-420e-a975-95d7e72f7a30"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.014443 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-config\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.015527 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-scripts" (OuterVolumeSpecName: "scripts") pod "f206b784-e69e-420e-a975-95d7e72f7a30" (UID: "f206b784-e69e-420e-a975-95d7e72f7a30"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.018698 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-ovndb-tls-certs\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.019226 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "f206b784-e69e-420e-a975-95d7e72f7a30" (UID: "f206b784-e69e-420e-a975-95d7e72f7a30"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.020007 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-combined-ca-bundle\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.021402 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-httpd-config\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.023801 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0731f364-690d-4bf9-a86f-cbaa984c62c8" (UID: "0731f364-690d-4bf9-a86f-cbaa984c62c8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.023894 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f206b784-e69e-420e-a975-95d7e72f7a30-kube-api-access-h2qkp" (OuterVolumeSpecName: "kube-api-access-h2qkp") pod "f206b784-e69e-420e-a975-95d7e72f7a30" (UID: "f206b784-e69e-420e-a975-95d7e72f7a30"). InnerVolumeSpecName "kube-api-access-h2qkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.030310 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qc9j\" (UniqueName: \"kubernetes.io/projected/91213a5b-68c8-4220-81db-f6b5f3ff324e-kube-api-access-6qc9j\") pod \"neutron-bf8ff5468-dcb5b\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.046056 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0731f364-690d-4bf9-a86f-cbaa984c62c8" (UID: "0731f364-690d-4bf9-a86f-cbaa984c62c8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.050471 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f206b784-e69e-420e-a975-95d7e72f7a30" (UID: "f206b784-e69e-420e-a975-95d7e72f7a30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.056886 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-config-data" (OuterVolumeSpecName: "config-data") pod "f206b784-e69e-420e-a975-95d7e72f7a30" (UID: "f206b784-e69e-420e-a975-95d7e72f7a30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.075062 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.107123 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-scripts\") pod \"58201649-b6df-4c32-a1c2-b672eefca745\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.107220 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58201649-b6df-4c32-a1c2-b672eefca745-logs\") pod \"58201649-b6df-4c32-a1c2-b672eefca745\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.107288 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-combined-ca-bundle\") pod \"58201649-b6df-4c32-a1c2-b672eefca745\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.107323 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-config-data\") pod \"58201649-b6df-4c32-a1c2-b672eefca745\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.107477 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5wph\" (UniqueName: \"kubernetes.io/projected/58201649-b6df-4c32-a1c2-b672eefca745-kube-api-access-c5wph\") pod \"58201649-b6df-4c32-a1c2-b672eefca745\" (UID: \"58201649-b6df-4c32-a1c2-b672eefca745\") " Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.107993 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108012 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xzhw\" (UniqueName: \"kubernetes.io/projected/0731f364-690d-4bf9-a86f-cbaa984c62c8-kube-api-access-8xzhw\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108026 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108039 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108050 4816 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108063 4816 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f206b784-e69e-420e-a975-95d7e72f7a30-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108074 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2qkp\" (UniqueName: \"kubernetes.io/projected/f206b784-e69e-420e-a975-95d7e72f7a30-kube-api-access-h2qkp\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108085 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108095 4816 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0731f364-690d-4bf9-a86f-cbaa984c62c8-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.108164 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58201649-b6df-4c32-a1c2-b672eefca745-logs" (OuterVolumeSpecName: "logs") pod "58201649-b6df-4c32-a1c2-b672eefca745" (UID: "58201649-b6df-4c32-a1c2-b672eefca745"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.119626 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-scripts" (OuterVolumeSpecName: "scripts") pod "58201649-b6df-4c32-a1c2-b672eefca745" (UID: "58201649-b6df-4c32-a1c2-b672eefca745"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.119923 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58201649-b6df-4c32-a1c2-b672eefca745-kube-api-access-c5wph" (OuterVolumeSpecName: "kube-api-access-c5wph") pod "58201649-b6df-4c32-a1c2-b672eefca745" (UID: "58201649-b6df-4c32-a1c2-b672eefca745"). InnerVolumeSpecName "kube-api-access-c5wph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.149075 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-config-data" (OuterVolumeSpecName: "config-data") pod "58201649-b6df-4c32-a1c2-b672eefca745" (UID: "58201649-b6df-4c32-a1c2-b672eefca745"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.163994 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58201649-b6df-4c32-a1c2-b672eefca745" (UID: "58201649-b6df-4c32-a1c2-b672eefca745"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.213121 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5wph\" (UniqueName: \"kubernetes.io/projected/58201649-b6df-4c32-a1c2-b672eefca745-kube-api-access-c5wph\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.213197 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.213211 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58201649-b6df-4c32-a1c2-b672eefca745-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.213242 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.213253 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58201649-b6df-4c32-a1c2-b672eefca745-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.318636 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4sts7" event={"ID":"58201649-b6df-4c32-a1c2-b672eefca745","Type":"ContainerDied","Data":"f27ec706c1005cb99f36920350b302e1a020f80345d46432c243d0357fffe2b9"} Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.318710 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f27ec706c1005cb99f36920350b302e1a020f80345d46432c243d0357fffe2b9" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.318780 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4sts7" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.322498 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8mdth" event={"ID":"0731f364-690d-4bf9-a86f-cbaa984c62c8","Type":"ContainerDied","Data":"9f46a0eb5b270e8aeb9d7cb3adc664a1bcc8867e0c01f4f916e37f8e31465c65"} Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.322548 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f46a0eb5b270e8aeb9d7cb3adc664a1bcc8867e0c01f4f916e37f8e31465c65" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.326337 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-8mdth" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.340127 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ndrvk" event={"ID":"f206b784-e69e-420e-a975-95d7e72f7a30","Type":"ContainerDied","Data":"ee45cb5c4cc83b68908708b29deca9433c209dce329f81a2b7a3540c12cd48dd"} Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.340161 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee45cb5c4cc83b68908708b29deca9433c209dce329f81a2b7a3540c12cd48dd" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.340238 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ndrvk" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.633619 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9wwj2"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.633923 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5b47f74bcc-6q6s6"] Feb 16 13:23:05 crc kubenswrapper[4816]: E0216 13:23:05.634185 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0731f364-690d-4bf9-a86f-cbaa984c62c8" containerName="barbican-db-sync" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.638989 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0731f364-690d-4bf9-a86f-cbaa984c62c8" containerName="barbican-db-sync" Feb 16 13:23:05 crc kubenswrapper[4816]: E0216 13:23:05.639032 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f206b784-e69e-420e-a975-95d7e72f7a30" containerName="keystone-bootstrap" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.639126 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f206b784-e69e-420e-a975-95d7e72f7a30" containerName="keystone-bootstrap" Feb 16 13:23:05 crc kubenswrapper[4816]: E0216 13:23:05.639144 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58201649-b6df-4c32-a1c2-b672eefca745" containerName="placement-db-sync" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.639149 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="58201649-b6df-4c32-a1c2-b672eefca745" containerName="placement-db-sync" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.639391 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0731f364-690d-4bf9-a86f-cbaa984c62c8" containerName="barbican-db-sync" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.639407 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f206b784-e69e-420e-a975-95d7e72f7a30" containerName="keystone-bootstrap" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.639420 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="58201649-b6df-4c32-a1c2-b672eefca745" containerName="placement-db-sync" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.639919 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5b47f74bcc-6q6s6"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.640010 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.655906 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.656141 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zz64" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.656252 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.656360 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.656528 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.661979 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.675991 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-55b8c8fc68-7vq25"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.680025 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.688359 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-lmgvm" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.688617 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.688792 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.688882 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.689047 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.774748 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55b8c8fc68-7vq25"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.823165 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6b7bf89895-xstsf"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.824569 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828245 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-combined-ca-bundle\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828310 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vflv\" (UniqueName: \"kubernetes.io/projected/ff515b67-05f3-478c-9613-cef5044dadaa-kube-api-access-2vflv\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828339 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-config-data\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828389 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data-custom\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828417 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-credential-keys\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828453 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-config-data\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828470 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-internal-tls-certs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828500 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828524 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-combined-ca-bundle\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828543 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-fernet-keys\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828572 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-combined-ca-bundle\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828597 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-public-tls-certs\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828636 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-public-tls-certs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828695 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-scripts\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828734 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-internal-tls-certs\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828759 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff515b67-05f3-478c-9613-cef5044dadaa-logs\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828781 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-scripts\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828826 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-hqmjw\" (UniqueName: \"kubernetes.io/projected/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-kube-api-access-hqmjw\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828845 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c830726-778d-4471-98f0-abe404146440-logs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.828876 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmhlv\" (UniqueName: \"kubernetes.io/projected/1c830726-778d-4471-98f0-abe404146440-kube-api-access-mmhlv\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.829429 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.840917 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.841135 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-lwnff" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.852693 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b7bf89895-xstsf"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.890716 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-67998cc688-ffntn"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.892115 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.897985 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930290 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-combined-ca-bundle\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930348 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vflv\" (UniqueName: \"kubernetes.io/projected/ff515b67-05f3-478c-9613-cef5044dadaa-kube-api-access-2vflv\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930382 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-config-data\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data-custom\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930485 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-credential-keys\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930508 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-config-data\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930528 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-internal-tls-certs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930552 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930578 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-combined-ca-bundle\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930606 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-fernet-keys\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930642 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-combined-ca-bundle\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930681 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-public-tls-certs\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930718 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-public-tls-certs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930741 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-scripts\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930786 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-internal-tls-certs\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930808 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff515b67-05f3-478c-9613-cef5044dadaa-logs\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930827 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-scripts\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930884 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqmjw\" (UniqueName: \"kubernetes.io/projected/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-kube-api-access-hqmjw\") pod 
\"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930906 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c830726-778d-4471-98f0-abe404146440-logs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.930943 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmhlv\" (UniqueName: \"kubernetes.io/projected/1c830726-778d-4471-98f0-abe404146440-kube-api-access-mmhlv\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.958576 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-credential-keys\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.960035 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff515b67-05f3-478c-9613-cef5044dadaa-logs\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.960157 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-67998cc688-ffntn"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.960269 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c830726-778d-4471-98f0-abe404146440-logs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.967377 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-combined-ca-bundle\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.967764 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-internal-tls-certs\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.968050 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-scripts\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.968521 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-internal-tls-certs\") pod 
\"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.969499 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vflv\" (UniqueName: \"kubernetes.io/projected/ff515b67-05f3-478c-9613-cef5044dadaa-kube-api-access-2vflv\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.970273 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-config-data\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.972192 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmhlv\" (UniqueName: \"kubernetes.io/projected/1c830726-778d-4471-98f0-abe404146440-kube-api-access-mmhlv\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.973031 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-public-tls-certs\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.974081 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-combined-ca-bundle\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.975360 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-scripts\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.979610 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-public-tls-certs\") pod \"placement-55b8c8fc68-7vq25\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.983698 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-config-data\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.991226 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-fernet-keys\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:05 crc kubenswrapper[4816]: 
I0216 13:23:05.991741 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.992160 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data-custom\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.992192 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9wwj2"] Feb 16 13:23:05 crc kubenswrapper[4816]: I0216 13:23:05.995269 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqmjw\" (UniqueName: \"kubernetes.io/projected/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-kube-api-access-hqmjw\") pod \"keystone-5b47f74bcc-6q6s6\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.008453 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.009277 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-combined-ca-bundle\") pod \"barbican-worker-6b7bf89895-xstsf\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.020729 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-6g2pf"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.022325 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035546 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-combined-ca-bundle\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035591 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035633 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qj4t\" (UniqueName: \"kubernetes.io/projected/a9c47f01-6045-4af2-82f5-3939a41029e6-kube-api-access-5qj4t\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035670 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035693 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-config\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035708 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035728 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-svc\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035746 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035773 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c47f01-6045-4af2-82f5-3939a41029e6-logs\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035825 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xhfs\" (UniqueName: \"kubernetes.io/projected/3a864762-5560-46b5-86ef-1ad6dd3adfa7-kube-api-access-2xhfs\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.035870 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data-custom\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.088322 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-6g2pf"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.138924 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-combined-ca-bundle\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.138976 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139018 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qj4t\" (UniqueName: \"kubernetes.io/projected/a9c47f01-6045-4af2-82f5-3939a41029e6-kube-api-access-5qj4t\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139044 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139065 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-config\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139081 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139111 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-svc\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139131 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139162 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c47f01-6045-4af2-82f5-3939a41029e6-logs\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139211 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xhfs\" (UniqueName: \"kubernetes.io/projected/3a864762-5560-46b5-86ef-1ad6dd3adfa7-kube-api-access-2xhfs\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.139247 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data-custom\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.142327 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-svc\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.145232 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data-custom\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.145793 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.146071 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a9c47f01-6045-4af2-82f5-3939a41029e6-logs\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.146864 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-config\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.146903 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-bf8ff5468-dcb5b"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.147389 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.151167 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.151248 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.156625 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-combined-ca-bundle\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.162232 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5ffd8b88f4-cqjcr"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.163918 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.181359 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.183428 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xhfs\" (UniqueName: \"kubernetes.io/projected/3a864762-5560-46b5-86ef-1ad6dd3adfa7-kube-api-access-2xhfs\") pod \"dnsmasq-dns-688c87cc99-6g2pf\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.208734 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-757c679767-6dfp5"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.210756 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.220137 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qj4t\" (UniqueName: \"kubernetes.io/projected/a9c47f01-6045-4af2-82f5-3939a41029e6-kube-api-access-5qj4t\") pod \"barbican-keystone-listener-67998cc688-ffntn\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.279832 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.333782 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.356178 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-combined-ca-bundle\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.356244 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/585ce61c-bb97-4b2c-bea8-c55d06e6db79-logs\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.356268 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-public-tls-certs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.356299 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-config-data\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.356383 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-combined-ca-bundle\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.363250 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data-custom\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.363361 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/26c49ecf-0c54-4aa7-893f-861370b1cdbd-logs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.363476 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-677hs\" (UniqueName: \"kubernetes.io/projected/585ce61c-bb97-4b2c-bea8-c55d06e6db79-kube-api-access-677hs\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.363508 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.363552 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-scripts\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.363629 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-internal-tls-certs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.363778 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpldd\" (UniqueName: \"kubernetes.io/projected/26c49ecf-0c54-4aa7-893f-861370b1cdbd-kube-api-access-mpldd\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.467404 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-combined-ca-bundle\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474074 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data-custom\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474181 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26c49ecf-0c54-4aa7-893f-861370b1cdbd-logs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474349 4816 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-677hs\" (UniqueName: \"kubernetes.io/projected/585ce61c-bb97-4b2c-bea8-c55d06e6db79-kube-api-access-677hs\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474390 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474435 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-scripts\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474509 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-internal-tls-certs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474647 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpldd\" (UniqueName: \"kubernetes.io/projected/26c49ecf-0c54-4aa7-893f-861370b1cdbd-kube-api-access-mpldd\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474798 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-combined-ca-bundle\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474858 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-public-tls-certs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474893 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/585ce61c-bb97-4b2c-bea8-c55d06e6db79-logs\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.474961 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-config-data\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.480585 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" 
event={"ID":"0ff3ba73-e29e-4d5b-b176-80bac04a9a97","Type":"ContainerStarted","Data":"fcdd47bfe5d3337328e1b6582c1f4e1621c18ac3a3fd9afe8f18b84fffa97430"} Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.481625 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/585ce61c-bb97-4b2c-bea8-c55d06e6db79-logs\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.482257 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26c49ecf-0c54-4aa7-893f-861370b1cdbd-logs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.493219 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.506748 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5ffd8b88f4-cqjcr"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.517594 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-config-data\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.552924 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8ff5468-dcb5b" event={"ID":"91213a5b-68c8-4220-81db-f6b5f3ff324e","Type":"ContainerStarted","Data":"c9f347a26158777e4e8f0befc7879356d6cab8fce442030978ce8cca8e660a69"} Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.553530 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-combined-ca-bundle\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.566840 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-combined-ca-bundle\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.567059 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-677hs\" (UniqueName: \"kubernetes.io/projected/585ce61c-bb97-4b2c-bea8-c55d06e6db79-kube-api-access-677hs\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.567391 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-internal-tls-certs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.568902 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data-custom\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.569200 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-public-tls-certs\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.569479 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpldd\" (UniqueName: \"kubernetes.io/projected/26c49ecf-0c54-4aa7-893f-861370b1cdbd-kube-api-access-mpldd\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.584359 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data\") pod \"barbican-worker-757c679767-6dfp5\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.593082 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-scripts\") pod \"placement-5ffd8b88f4-cqjcr\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.596446 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-757c679767-6dfp5"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.615861 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-cbf6d8974-7ddwq"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.617603 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.637711 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-cbf6d8974-7ddwq"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.672931 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.687001 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-combined-ca-bundle\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.687057 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data-custom\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.687121 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.687194 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v54q2\" (UniqueName: \"kubernetes.io/projected/ff0d5c9b-ff09-43bf-977f-e69533c63966-kube-api-access-v54q2\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.687221 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d5c9b-ff09-43bf-977f-e69533c63966-logs\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.719910 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6487d4d9b-cbp92"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.721760 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6487d4d9b-cbp92"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.721875 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.722941 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.726986 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7dd6997456-7z4ck"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.730512 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.738836 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.758744 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7dd6997456-7z4ck"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793482 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-config\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793536 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-ovndb-tls-certs\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793612 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data-custom\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793641 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5llf\" (UniqueName: \"kubernetes.io/projected/3674313a-95cf-4c2c-b15b-39045c4ad09d-kube-api-access-l5llf\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793691 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-combined-ca-bundle\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793718 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data-custom\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793865 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb758\" (UniqueName: \"kubernetes.io/projected/e948086b-213d-4435-a751-c716f71b95f2-kube-api-access-jb758\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793887 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data\") pod 
\"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793924 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793952 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3674313a-95cf-4c2c-b15b-39045c4ad09d-logs\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793968 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-combined-ca-bundle\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.793994 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-httpd-config\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.794014 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-combined-ca-bundle\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.794036 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v54q2\" (UniqueName: \"kubernetes.io/projected/ff0d5c9b-ff09-43bf-977f-e69533c63966-kube-api-access-v54q2\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.794056 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d5c9b-ff09-43bf-977f-e69533c63966-logs\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.794447 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d5c9b-ff09-43bf-977f-e69533c63966-logs\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.811112 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-combined-ca-bundle\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.818446 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data-custom\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.822732 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.828057 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v54q2\" (UniqueName: \"kubernetes.io/projected/ff0d5c9b-ff09-43bf-977f-e69533c63966-kube-api-access-v54q2\") pod \"barbican-keystone-listener-cbf6d8974-7ddwq\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897259 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-config\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897367 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-ovndb-tls-certs\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897441 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data-custom\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897467 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5llf\" (UniqueName: \"kubernetes.io/projected/3674313a-95cf-4c2c-b15b-39045c4ad09d-kube-api-access-l5llf\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897506 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb758\" (UniqueName: \"kubernetes.io/projected/e948086b-213d-4435-a751-c716f71b95f2-kube-api-access-jb758\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897520 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897570 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3674313a-95cf-4c2c-b15b-39045c4ad09d-logs\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897590 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-combined-ca-bundle\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897618 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-httpd-config\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.897639 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-combined-ca-bundle\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.901544 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-ovndb-tls-certs\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.903211 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.905941 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3674313a-95cf-4c2c-b15b-39045c4ad09d-logs\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.909748 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data-custom\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.921739 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-httpd-config\") pod \"neutron-6487d4d9b-cbp92\" (UID: 
\"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.921743 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-combined-ca-bundle\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.905923 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-config\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.922334 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-combined-ca-bundle\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.922513 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55b8c8fc68-7vq25"] Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.925199 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5llf\" (UniqueName: \"kubernetes.io/projected/3674313a-95cf-4c2c-b15b-39045c4ad09d-kube-api-access-l5llf\") pod \"barbican-api-7dd6997456-7z4ck\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") " pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:06 crc kubenswrapper[4816]: I0216 13:23:06.929372 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb758\" (UniqueName: \"kubernetes.io/projected/e948086b-213d-4435-a751-c716f71b95f2-kube-api-access-jb758\") pod \"neutron-6487d4d9b-cbp92\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.007881 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.085159 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.191760 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.282327 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b7bf89895-xstsf"] Feb 16 13:23:07 crc kubenswrapper[4816]: W0216 13:23:07.380852 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff515b67_05f3_478c_9613_cef5044dadaa.slice/crio-7feb82541029621b406f60d64f995bce37370167ed58fdf2f6ced31408237486 WatchSource:0}: Error finding container 7feb82541029621b406f60d64f995bce37370167ed58fdf2f6ced31408237486: Status 404 returned error can't find the container with id 7feb82541029621b406f60d64f995bce37370167ed58fdf2f6ced31408237486 Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.521778 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5b47f74bcc-6q6s6"] Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.733118 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8ff5468-dcb5b" event={"ID":"91213a5b-68c8-4220-81db-f6b5f3ff324e","Type":"ContainerStarted","Data":"e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a"} Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.734996 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55b8c8fc68-7vq25" event={"ID":"1c830726-778d-4471-98f0-abe404146440","Type":"ContainerStarted","Data":"6b5b210f6292d1ae6d3755c1d2555206763b0b6a4ca8ae236b4c3d7c56803e76"} Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.737701 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b7bf89895-xstsf" event={"ID":"ff515b67-05f3-478c-9613-cef5044dadaa","Type":"ContainerStarted","Data":"7feb82541029621b406f60d64f995bce37370167ed58fdf2f6ced31408237486"} Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.742920 4816 generic.go:334] "Generic (PLEG): container finished" podID="0ff3ba73-e29e-4d5b-b176-80bac04a9a97" containerID="c4cf40adca4293a093c62cfc7d0ab14ea2f569b92a0a5b5b45781365581ad0e3" exitCode=0 Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.742979 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" event={"ID":"0ff3ba73-e29e-4d5b-b176-80bac04a9a97","Type":"ContainerDied","Data":"c4cf40adca4293a093c62cfc7d0ab14ea2f569b92a0a5b5b45781365581ad0e3"} Feb 16 13:23:07 crc kubenswrapper[4816]: W0216 13:23:07.780713 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7e01f36_29ae_4e7d_9dfb_c91c3f860060.slice/crio-fd798b75e708a13c82512a3ccace98fe96fd081fb5dffe8646c94bb8ebeb6d14 WatchSource:0}: Error finding container fd798b75e708a13c82512a3ccace98fe96fd081fb5dffe8646c94bb8ebeb6d14: Status 404 returned error can't find the container with id fd798b75e708a13c82512a3ccace98fe96fd081fb5dffe8646c94bb8ebeb6d14 Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.836082 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.837046 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.887259 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 
13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.891325 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 13:23:07 crc kubenswrapper[4816]: I0216 13:23:07.986551 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-6g2pf"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.156403 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-67998cc688-ffntn"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.180835 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5ffd8b88f4-cqjcr"] Feb 16 13:23:08 crc kubenswrapper[4816]: W0216 13:23:08.189050 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26c49ecf_0c54_4aa7_893f_861370b1cdbd.slice/crio-fcdf0757eb60c77b310e71e77c8516c1bd19a9d9fc2027399b6b712409b9e45a WatchSource:0}: Error finding container fcdf0757eb60c77b310e71e77c8516c1bd19a9d9fc2027399b6b712409b9e45a: Status 404 returned error can't find the container with id fcdf0757eb60c77b310e71e77c8516c1bd19a9d9fc2027399b6b712409b9e45a Feb 16 13:23:08 crc kubenswrapper[4816]: W0216 13:23:08.205805 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a864762_5560_46b5_86ef_1ad6dd3adfa7.slice/crio-4b6b0ecb9df21741fb767f0d59948463e2109d7b931081e851954d0f93f38dde WatchSource:0}: Error finding container 4b6b0ecb9df21741fb767f0d59948463e2109d7b931081e851954d0f93f38dde: Status 404 returned error can't find the container with id 4b6b0ecb9df21741fb767f0d59948463e2109d7b931081e851954d0f93f38dde Feb 16 13:23:08 crc kubenswrapper[4816]: W0216 13:23:08.207118 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod585ce61c_bb97_4b2c_bea8_c55d06e6db79.slice/crio-9cbb697e7ec76e01a0ae9873825fc00b7de069659e0b00f0c643a89e785386bf WatchSource:0}: Error finding container 9cbb697e7ec76e01a0ae9873825fc00b7de069659e0b00f0c643a89e785386bf: Status 404 returned error can't find the container with id 9cbb697e7ec76e01a0ae9873825fc00b7de069659e0b00f0c643a89e785386bf Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.210796 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-757c679767-6dfp5"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.225962 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-cbf6d8974-7ddwq"] Feb 16 13:23:08 crc kubenswrapper[4816]: W0216 13:23:08.413983 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3674313a_95cf_4c2c_b15b_39045c4ad09d.slice/crio-b4a93dd37996291f27020e4a38c9351923ead5d4e66b87bcd631433e38cfd636 WatchSource:0}: Error finding container b4a93dd37996291f27020e4a38c9351923ead5d4e66b87bcd631433e38cfd636: Status 404 returned error can't find the container with id b4a93dd37996291f27020e4a38c9351923ead5d4e66b87bcd631433e38cfd636 Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.414205 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-bf8ff5468-dcb5b"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.438853 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8ff9ccb6f-bwqh8"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 
13:23:08.440982 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.444385 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.445885 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.465701 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8ff9ccb6f-bwqh8"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.624939 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7dd6997456-7z4ck"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.658322 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6487d4d9b-cbp92"] Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.668664 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.703038 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-config\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.703085 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-httpd-config\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.703117 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-internal-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.703206 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-combined-ca-bundle\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.703699 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-public-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.703784 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-ovndb-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc 
kubenswrapper[4816]: I0216 13:23:08.703935 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhnpq\" (UniqueName: \"kubernetes.io/projected/4ae7c256-cd2e-4919-a488-84526307d47c-kube-api-access-mhnpq\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.787508 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5b47f74bcc-6q6s6" event={"ID":"b7e01f36-29ae-4e7d-9dfb-c91c3f860060","Type":"ContainerStarted","Data":"98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.787574 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5b47f74bcc-6q6s6" event={"ID":"b7e01f36-29ae-4e7d-9dfb-c91c3f860060","Type":"ContainerStarted","Data":"fd798b75e708a13c82512a3ccace98fe96fd081fb5dffe8646c94bb8ebeb6d14"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.787716 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.789109 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5ffd8b88f4-cqjcr" event={"ID":"26c49ecf-0c54-4aa7-893f-861370b1cdbd","Type":"ContainerStarted","Data":"fcdf0757eb60c77b310e71e77c8516c1bd19a9d9fc2027399b6b712409b9e45a"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.812312 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-nb\") pod \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.812395 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-config\") pod \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.812734 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5lx7\" (UniqueName: \"kubernetes.io/projected/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-kube-api-access-g5lx7\") pod \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.812804 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-svc\") pod \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.812882 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-swift-storage-0\") pod \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813020 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-sb\") pod 
\"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\" (UID: \"0ff3ba73-e29e-4d5b-b176-80bac04a9a97\") " Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813436 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhnpq\" (UniqueName: \"kubernetes.io/projected/4ae7c256-cd2e-4919-a488-84526307d47c-kube-api-access-mhnpq\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813471 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-config\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813496 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-httpd-config\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813530 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-internal-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813710 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-combined-ca-bundle\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813770 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-public-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.813853 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-ovndb-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.815470 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55b8c8fc68-7vq25" event={"ID":"1c830726-778d-4471-98f0-abe404146440","Type":"ContainerStarted","Data":"8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.815528 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55b8c8fc68-7vq25" event={"ID":"1c830726-778d-4471-98f0-abe404146440","Type":"ContainerStarted","Data":"5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.816138 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 
13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.816162 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.821300 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5b47f74bcc-6q6s6" podStartSLOduration=3.821274465 podStartE2EDuration="3.821274465s" podCreationTimestamp="2026-02-16 13:23:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:08.811493347 +0000 UTC m=+1188.138207075" watchObservedRunningTime="2026-02-16 13:23:08.821274465 +0000 UTC m=+1188.147988193" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.824641 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7dd6997456-7z4ck" event={"ID":"3674313a-95cf-4c2c-b15b-39045c4ad09d","Type":"ContainerStarted","Data":"b4a93dd37996291f27020e4a38c9351923ead5d4e66b87bcd631433e38cfd636"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.828937 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6487d4d9b-cbp92" event={"ID":"e948086b-213d-4435-a751-c716f71b95f2","Type":"ContainerStarted","Data":"920a3d840cff98e5e8379e0175789b4cd4acd9597c16a33e6646ecc9e729abba"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.834407 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhnpq\" (UniqueName: \"kubernetes.io/projected/4ae7c256-cd2e-4919-a488-84526307d47c-kube-api-access-mhnpq\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.838050 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-combined-ca-bundle\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.847627 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-config\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.862278 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-internal-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.862465 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-ovndb-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.862614 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-httpd-config\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " 
pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.863758 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-kube-api-access-g5lx7" (OuterVolumeSpecName: "kube-api-access-g5lx7") pod "0ff3ba73-e29e-4d5b-b176-80bac04a9a97" (UID: "0ff3ba73-e29e-4d5b-b176-80bac04a9a97"). InnerVolumeSpecName "kube-api-access-g5lx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.868388 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-public-tls-certs\") pod \"neutron-8ff9ccb6f-bwqh8\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.874068 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" event={"ID":"0ff3ba73-e29e-4d5b-b176-80bac04a9a97","Type":"ContainerDied","Data":"fcdd47bfe5d3337328e1b6582c1f4e1621c18ac3a3fd9afe8f18b84fffa97430"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.874209 4816 scope.go:117] "RemoveContainer" containerID="c4cf40adca4293a093c62cfc7d0ab14ea2f569b92a0a5b5b45781365581ad0e3" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.874413 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9wwj2" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.887622 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" event={"ID":"ff0d5c9b-ff09-43bf-977f-e69533c63966","Type":"ContainerStarted","Data":"62340f1d587df0812585af4125da416fa66e04d90db26a84d8ca70065bbc5a4d"} Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.889083 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-55b8c8fc68-7vq25" podStartSLOduration=3.889060436 podStartE2EDuration="3.889060436s" podCreationTimestamp="2026-02-16 13:23:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:08.845850946 +0000 UTC m=+1188.172564674" watchObservedRunningTime="2026-02-16 13:23:08.889060436 +0000 UTC m=+1188.215774164" Feb 16 13:23:08 crc kubenswrapper[4816]: I0216 13:23:08.908677 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" event={"ID":"3a864762-5560-46b5-86ef-1ad6dd3adfa7","Type":"ContainerStarted","Data":"4b6b0ecb9df21741fb767f0d59948463e2109d7b931081e851954d0f93f38dde"} Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:08.919395 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5lx7\" (UniqueName: \"kubernetes.io/projected/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-kube-api-access-g5lx7\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.130851 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-757c679767-6dfp5" event={"ID":"585ce61c-bb97-4b2c-bea8-c55d06e6db79","Type":"ContainerStarted","Data":"9cbb697e7ec76e01a0ae9873825fc00b7de069659e0b00f0c643a89e785386bf"} Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.133618 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.143901 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" event={"ID":"a9c47f01-6045-4af2-82f5-3939a41029e6","Type":"ContainerStarted","Data":"9495b88045b8a4545a5be37bbf03fdc38c00bb2b8f30e393d07040a828c23309"} Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.150308 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8ff5468-dcb5b" event={"ID":"91213a5b-68c8-4220-81db-f6b5f3ff324e","Type":"ContainerStarted","Data":"847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6"} Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.150375 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.150550 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.150567 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.192329 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-bf8ff5468-dcb5b" podStartSLOduration=5.192283277 podStartE2EDuration="5.192283277s" podCreationTimestamp="2026-02-16 13:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:09.175467787 +0000 UTC m=+1188.502181515" watchObservedRunningTime="2026-02-16 13:23:09.192283277 +0000 UTC m=+1188.518997005" Feb 16 13:23:09 crc kubenswrapper[4816]: E0216 13:23:09.538863 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13dd3829_afd7_421b_8caf_6f789f71fc25.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.690632 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-config" (OuterVolumeSpecName: "config") pod "0ff3ba73-e29e-4d5b-b176-80bac04a9a97" (UID: "0ff3ba73-e29e-4d5b-b176-80bac04a9a97"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.698437 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0ff3ba73-e29e-4d5b-b176-80bac04a9a97" (UID: "0ff3ba73-e29e-4d5b-b176-80bac04a9a97"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.702943 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0ff3ba73-e29e-4d5b-b176-80bac04a9a97" (UID: "0ff3ba73-e29e-4d5b-b176-80bac04a9a97"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.704685 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0ff3ba73-e29e-4d5b-b176-80bac04a9a97" (UID: "0ff3ba73-e29e-4d5b-b176-80bac04a9a97"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.716334 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0ff3ba73-e29e-4d5b-b176-80bac04a9a97" (UID: "0ff3ba73-e29e-4d5b-b176-80bac04a9a97"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.759251 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.759627 4816 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.759639 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.759664 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.759673 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff3ba73-e29e-4d5b-b176-80bac04a9a97-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.881335 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9wwj2"] Feb 16 13:23:09 crc kubenswrapper[4816]: I0216 13:23:09.903256 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9wwj2"] Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.196909 4816 generic.go:334] "Generic (PLEG): container finished" podID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" containerID="3ec7bb1fa094c748d9f0a438072eea2ded5beaac207ad9e94a2baa9e58d3d69f" exitCode=0 Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.197221 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" event={"ID":"3a864762-5560-46b5-86ef-1ad6dd3adfa7","Type":"ContainerDied","Data":"3ec7bb1fa094c748d9f0a438072eea2ded5beaac207ad9e94a2baa9e58d3d69f"} Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.206280 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5ffd8b88f4-cqjcr" event={"ID":"26c49ecf-0c54-4aa7-893f-861370b1cdbd","Type":"ContainerStarted","Data":"0a37364f47721e42e8d7d8c8e7e0b76b9f09f0c7e0a00afcf23bbc67bb3d615e"} Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.216807 4816 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7dd6997456-7z4ck" event={"ID":"3674313a-95cf-4c2c-b15b-39045c4ad09d","Type":"ContainerStarted","Data":"332aaccdf8e9202da5c9d1efd02710688b01fcfcafd2c2088b80968af072484c"} Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.219907 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-bf8ff5468-dcb5b" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-api" containerID="cri-o://e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a" gracePeriod=30 Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.220035 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-bf8ff5468-dcb5b" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-httpd" containerID="cri-o://847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6" gracePeriod=30 Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.424398 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.425576 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.538642 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.538709 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:10 crc kubenswrapper[4816]: I0216 13:23:10.639435 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8ff9ccb6f-bwqh8"] Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.230589 4816 generic.go:334] "Generic (PLEG): container finished" podID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerID="847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6" exitCode=0 Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.230702 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8ff5468-dcb5b" event={"ID":"91213a5b-68c8-4220-81db-f6b5f3ff324e","Type":"ContainerDied","Data":"847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6"} Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.243545 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5ffd8b88f4-cqjcr" event={"ID":"26c49ecf-0c54-4aa7-893f-861370b1cdbd","Type":"ContainerStarted","Data":"ed64af8ac2faddc8f5b3609993e7e85b7c02038ee89682aa306fb9d136d0c815"} Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.244958 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.244988 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:11 crc kubenswrapper[4816]: W0216 13:23:11.245694 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ae7c256_cd2e_4919_a488_84526307d47c.slice/crio-865c7c280a9a941bf9435d2fb5fb52ec7f41d12353c91825b170a42f294f61ec WatchSource:0}: Error finding container 865c7c280a9a941bf9435d2fb5fb52ec7f41d12353c91825b170a42f294f61ec: Status 404 returned error can't find the container with id 
865c7c280a9a941bf9435d2fb5fb52ec7f41d12353c91825b170a42f294f61ec Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.248235 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7dd6997456-7z4ck" event={"ID":"3674313a-95cf-4c2c-b15b-39045c4ad09d","Type":"ContainerStarted","Data":"073f1f1add3cf51e4a0fe170d3f3ff0fb0431f417c70a7b522e6b526f0b43f40"} Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.248365 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.248442 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.249934 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.249949 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.250406 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6487d4d9b-cbp92" event={"ID":"e948086b-213d-4435-a751-c716f71b95f2","Type":"ContainerStarted","Data":"403d7d0e05eebee102b2d2a528727671b1ddf72a2a83a41898051db48d730e96"} Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.251350 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.251368 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.304104 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7dd6997456-7z4ck" podStartSLOduration=5.304079421 podStartE2EDuration="5.304079421s" podCreationTimestamp="2026-02-16 13:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:11.294180921 +0000 UTC m=+1190.620894659" watchObservedRunningTime="2026-02-16 13:23:11.304079421 +0000 UTC m=+1190.630793149" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.306911 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5ffd8b88f4-cqjcr" podStartSLOduration=5.306902538 podStartE2EDuration="5.306902538s" podCreationTimestamp="2026-02-16 13:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:11.278384229 +0000 UTC m=+1190.605097967" watchObservedRunningTime="2026-02-16 13:23:11.306902538 +0000 UTC m=+1190.633616256" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.426284 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ff3ba73-e29e-4d5b-b176-80bac04a9a97" path="/var/lib/kubelet/pods/0ff3ba73-e29e-4d5b-b176-80bac04a9a97/volumes" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.963024 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5b75bc5544-lb94h"] Feb 16 13:23:11 crc kubenswrapper[4816]: E0216 13:23:11.963425 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ff3ba73-e29e-4d5b-b176-80bac04a9a97" containerName="init" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.963439 4816 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0ff3ba73-e29e-4d5b-b176-80bac04a9a97" containerName="init" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.963643 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ff3ba73-e29e-4d5b-b176-80bac04a9a97" containerName="init" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.964762 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.967719 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.968192 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 16 13:23:11 crc kubenswrapper[4816]: I0216 13:23:11.976084 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b75bc5544-lb94h"] Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.049935 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-public-tls-certs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.050001 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data-custom\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.050031 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592c5e3a-06b6-4b36-8281-c5c49051c945-logs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.050075 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-combined-ca-bundle\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.050156 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-internal-tls-certs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.050182 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.050205 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-8wg5z\" (UniqueName: \"kubernetes.io/projected/592c5e3a-06b6-4b36-8281-c5c49051c945-kube-api-access-8wg5z\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.385512 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wg5z\" (UniqueName: \"kubernetes.io/projected/592c5e3a-06b6-4b36-8281-c5c49051c945-kube-api-access-8wg5z\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.385863 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-public-tls-certs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.385892 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data-custom\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.385918 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592c5e3a-06b6-4b36-8281-c5c49051c945-logs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.385966 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-combined-ca-bundle\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.386035 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-internal-tls-certs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.386054 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.386949 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592c5e3a-06b6-4b36-8281-c5c49051c945-logs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.391142 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-public-tls-certs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.406303 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data-custom\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.410115 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.411622 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-combined-ca-bundle\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.411745 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-internal-tls-certs\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.424038 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wg5z\" (UniqueName: \"kubernetes.io/projected/592c5e3a-06b6-4b36-8281-c5c49051c945-kube-api-access-8wg5z\") pod \"barbican-api-5b75bc5544-lb94h\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.455452 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8ff9ccb6f-bwqh8" event={"ID":"4ae7c256-cd2e-4919-a488-84526307d47c","Type":"ContainerStarted","Data":"865c7c280a9a941bf9435d2fb5fb52ec7f41d12353c91825b170a42f294f61ec"} Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.492581 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.492711 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.493859 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 13:23:12 crc kubenswrapper[4816]: I0216 13:23:12.587853 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.258948 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b75bc5544-lb94h"] Feb 16 13:23:13 crc kubenswrapper[4816]: W0216 13:23:13.296693 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod592c5e3a_06b6_4b36_8281_c5c49051c945.slice/crio-547356f04fa0f8348ea58d056613cf41a4d33f9c9ae01797839c445e1d6ff4ea WatchSource:0}: Error finding container 547356f04fa0f8348ea58d056613cf41a4d33f9c9ae01797839c445e1d6ff4ea: Status 404 returned error can't find the container with id 547356f04fa0f8348ea58d056613cf41a4d33f9c9ae01797839c445e1d6ff4ea Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.464253 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6487d4d9b-cbp92" event={"ID":"e948086b-213d-4435-a751-c716f71b95f2","Type":"ContainerStarted","Data":"4c6c7ae0ae2a2aaf24f32900cba79dfeed3ae6f7ea3a213eed8d31fe5f2e53fd"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.465418 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.489975 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" event={"ID":"ff0d5c9b-ff09-43bf-977f-e69533c63966","Type":"ContainerStarted","Data":"2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.490366 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" event={"ID":"ff0d5c9b-ff09-43bf-977f-e69533c63966","Type":"ContainerStarted","Data":"deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.502575 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8ff9ccb6f-bwqh8" event={"ID":"4ae7c256-cd2e-4919-a488-84526307d47c","Type":"ContainerStarted","Data":"a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.527493 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" event={"ID":"3a864762-5560-46b5-86ef-1ad6dd3adfa7","Type":"ContainerStarted","Data":"df32c655695662af3f4785beadfd5108fbf95a04faa2884f5decc908c0f8b4c1"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.528643 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.544111 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" podStartSLOduration=3.236564656 podStartE2EDuration="7.544059416s" podCreationTimestamp="2026-02-16 13:23:06 +0000 UTC" firstStartedPulling="2026-02-16 13:23:08.234353325 +0000 UTC m=+1187.561067053" lastFinishedPulling="2026-02-16 13:23:12.541848085 +0000 UTC m=+1191.868561813" observedRunningTime="2026-02-16 13:23:13.522169328 +0000 UTC m=+1192.848883056" watchObservedRunningTime="2026-02-16 13:23:13.544059416 +0000 UTC m=+1192.870773154" Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.549878 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-757c679767-6dfp5" 
event={"ID":"585ce61c-bb97-4b2c-bea8-c55d06e6db79","Type":"ContainerStarted","Data":"b0b6d2f42d6bfd99410fd4fbcca38774a4fbd1a10e9d1373d8a5aa64dbbd9e0a"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.557931 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6487d4d9b-cbp92" podStartSLOduration=7.557903474 podStartE2EDuration="7.557903474s" podCreationTimestamp="2026-02-16 13:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:13.493382422 +0000 UTC m=+1192.820096170" watchObservedRunningTime="2026-02-16 13:23:13.557903474 +0000 UTC m=+1192.884617202" Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.558694 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" event={"ID":"a9c47f01-6045-4af2-82f5-3939a41029e6","Type":"ContainerStarted","Data":"a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.568316 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b75bc5544-lb94h" event={"ID":"592c5e3a-06b6-4b36-8281-c5c49051c945","Type":"ContainerStarted","Data":"547356f04fa0f8348ea58d056613cf41a4d33f9c9ae01797839c445e1d6ff4ea"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.576578 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-67998cc688-ffntn"] Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.577529 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.577551 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.578083 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b7bf89895-xstsf" event={"ID":"ff515b67-05f3-478c-9613-cef5044dadaa","Type":"ContainerStarted","Data":"26e8b6aa57a5f3a5980b56ccae6cd4d16e281f02afde6afe01b599e74cc5ae90"} Feb 16 13:23:13 crc kubenswrapper[4816]: I0216 13:23:13.604046 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" podStartSLOduration=8.604021183 podStartE2EDuration="8.604021183s" podCreationTimestamp="2026-02-16 13:23:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:13.555887399 +0000 UTC m=+1192.882601137" watchObservedRunningTime="2026-02-16 13:23:13.604021183 +0000 UTC m=+1192.930734911" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.179175 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.191438 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.586858 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" event={"ID":"a9c47f01-6045-4af2-82f5-3939a41029e6","Type":"ContainerStarted","Data":"a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc"} Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.588261 4816 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/barbican-keystone-listener-67998cc688-ffntn" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener" containerID="cri-o://a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc" gracePeriod=30 Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.588235 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener-log" containerID="cri-o://a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098" gracePeriod=30 Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.590501 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b75bc5544-lb94h" event={"ID":"592c5e3a-06b6-4b36-8281-c5c49051c945","Type":"ContainerStarted","Data":"305b8eb6bcfac360528db193c73952f20605bc0004e0f5602cffb736efb9d9ec"} Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.590541 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b75bc5544-lb94h" event={"ID":"592c5e3a-06b6-4b36-8281-c5c49051c945","Type":"ContainerStarted","Data":"c3ee070672541f2475c7a5b84908e9db321ca75bcdb3b2238999b588eeb68da3"} Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.591407 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.591437 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.594091 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b7bf89895-xstsf" event={"ID":"ff515b67-05f3-478c-9613-cef5044dadaa","Type":"ContainerStarted","Data":"22a7346f911dacc5c0c309900a9551f250f0b975730d2b3b5083a38ea7a345ba"} Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.596447 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8ff9ccb6f-bwqh8" event={"ID":"4ae7c256-cd2e-4919-a488-84526307d47c","Type":"ContainerStarted","Data":"4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea"} Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.596574 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.598604 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-757c679767-6dfp5" event={"ID":"585ce61c-bb97-4b2c-bea8-c55d06e6db79","Type":"ContainerStarted","Data":"83a666709d0398bcf18db5bff64d1c6fa8da80e779c24200130a17a483a8ae2b"} Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.622421 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" podStartSLOduration=5.256197263 podStartE2EDuration="9.622397926s" podCreationTimestamp="2026-02-16 13:23:05 +0000 UTC" firstStartedPulling="2026-02-16 13:23:08.176562747 +0000 UTC m=+1187.503276475" lastFinishedPulling="2026-02-16 13:23:12.54276341 +0000 UTC m=+1191.869477138" observedRunningTime="2026-02-16 13:23:14.614803309 +0000 UTC m=+1193.941517037" watchObservedRunningTime="2026-02-16 13:23:14.622397926 +0000 UTC m=+1193.949111664" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.648975 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/barbican-worker-757c679767-6dfp5" podStartSLOduration=4.237449981 podStartE2EDuration="8.648947761s" podCreationTimestamp="2026-02-16 13:23:06 +0000 UTC" firstStartedPulling="2026-02-16 13:23:08.218852392 +0000 UTC m=+1187.545566120" lastFinishedPulling="2026-02-16 13:23:12.630350172 +0000 UTC m=+1191.957063900" observedRunningTime="2026-02-16 13:23:14.641374794 +0000 UTC m=+1193.968088522" watchObservedRunningTime="2026-02-16 13:23:14.648947761 +0000 UTC m=+1193.975661489" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.665153 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5b75bc5544-lb94h" podStartSLOduration=3.665125013 podStartE2EDuration="3.665125013s" podCreationTimestamp="2026-02-16 13:23:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:14.660723462 +0000 UTC m=+1193.987437210" watchObservedRunningTime="2026-02-16 13:23:14.665125013 +0000 UTC m=+1193.991838741" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.685026 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-6b7bf89895-xstsf"] Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.721565 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6b7bf89895-xstsf" podStartSLOduration=4.46446803 podStartE2EDuration="9.721540123s" podCreationTimestamp="2026-02-16 13:23:05 +0000 UTC" firstStartedPulling="2026-02-16 13:23:07.398681763 +0000 UTC m=+1186.725395491" lastFinishedPulling="2026-02-16 13:23:12.655753856 +0000 UTC m=+1191.982467584" observedRunningTime="2026-02-16 13:23:14.70676694 +0000 UTC m=+1194.033480678" watchObservedRunningTime="2026-02-16 13:23:14.721540123 +0000 UTC m=+1194.048253851" Feb 16 13:23:14 crc kubenswrapper[4816]: I0216 13:23:14.755589 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-8ff9ccb6f-bwqh8" podStartSLOduration=6.755552772 podStartE2EDuration="6.755552772s" podCreationTimestamp="2026-02-16 13:23:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:14.733274154 +0000 UTC m=+1194.059987882" watchObservedRunningTime="2026-02-16 13:23:14.755552772 +0000 UTC m=+1194.082266500" Feb 16 13:23:15 crc kubenswrapper[4816]: I0216 13:23:15.616272 4816 generic.go:334] "Generic (PLEG): container finished" podID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerID="a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098" exitCode=143 Feb 16 13:23:15 crc kubenswrapper[4816]: I0216 13:23:15.618423 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" event={"ID":"a9c47f01-6045-4af2-82f5-3939a41029e6","Type":"ContainerDied","Data":"a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098"} Feb 16 13:23:16 crc kubenswrapper[4816]: I0216 13:23:16.641850 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-6b7bf89895-xstsf" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker-log" containerID="cri-o://26e8b6aa57a5f3a5980b56ccae6cd4d16e281f02afde6afe01b599e74cc5ae90" gracePeriod=30 Feb 16 13:23:16 crc kubenswrapper[4816]: I0216 13:23:16.642486 4816 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/barbican-worker-6b7bf89895-xstsf" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker" containerID="cri-o://22a7346f911dacc5c0c309900a9551f250f0b975730d2b3b5083a38ea7a345ba" gracePeriod=30 Feb 16 13:23:17 crc kubenswrapper[4816]: I0216 13:23:17.654232 4816 generic.go:334] "Generic (PLEG): container finished" podID="ff515b67-05f3-478c-9613-cef5044dadaa" containerID="26e8b6aa57a5f3a5980b56ccae6cd4d16e281f02afde6afe01b599e74cc5ae90" exitCode=143 Feb 16 13:23:17 crc kubenswrapper[4816]: I0216 13:23:17.654313 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b7bf89895-xstsf" event={"ID":"ff515b67-05f3-478c-9613-cef5044dadaa","Type":"ContainerDied","Data":"26e8b6aa57a5f3a5980b56ccae6cd4d16e281f02afde6afe01b599e74cc5ae90"} Feb 16 13:23:18 crc kubenswrapper[4816]: I0216 13:23:18.665042 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:18 crc kubenswrapper[4816]: I0216 13:23:18.683400 4816 generic.go:334] "Generic (PLEG): container finished" podID="ff515b67-05f3-478c-9613-cef5044dadaa" containerID="22a7346f911dacc5c0c309900a9551f250f0b975730d2b3b5083a38ea7a345ba" exitCode=0 Feb 16 13:23:18 crc kubenswrapper[4816]: I0216 13:23:18.683454 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b7bf89895-xstsf" event={"ID":"ff515b67-05f3-478c-9613-cef5044dadaa","Type":"ContainerDied","Data":"22a7346f911dacc5c0c309900a9551f250f0b975730d2b3b5083a38ea7a345ba"} Feb 16 13:23:18 crc kubenswrapper[4816]: I0216 13:23:18.903785 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:19 crc kubenswrapper[4816]: E0216 13:23:19.772448 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13dd3829_afd7_421b_8caf_6f789f71fc25.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.238301 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.379091 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data-custom\") pod \"ff515b67-05f3-478c-9613-cef5044dadaa\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.379182 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-combined-ca-bundle\") pod \"ff515b67-05f3-478c-9613-cef5044dadaa\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.379287 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff515b67-05f3-478c-9613-cef5044dadaa-logs\") pod \"ff515b67-05f3-478c-9613-cef5044dadaa\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.379313 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vflv\" (UniqueName: \"kubernetes.io/projected/ff515b67-05f3-478c-9613-cef5044dadaa-kube-api-access-2vflv\") pod \"ff515b67-05f3-478c-9613-cef5044dadaa\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.379364 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data\") pod \"ff515b67-05f3-478c-9613-cef5044dadaa\" (UID: \"ff515b67-05f3-478c-9613-cef5044dadaa\") " Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.380080 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff515b67-05f3-478c-9613-cef5044dadaa-logs" (OuterVolumeSpecName: "logs") pod "ff515b67-05f3-478c-9613-cef5044dadaa" (UID: "ff515b67-05f3-478c-9613-cef5044dadaa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.385215 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ff515b67-05f3-478c-9613-cef5044dadaa" (UID: "ff515b67-05f3-478c-9613-cef5044dadaa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.389453 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff515b67-05f3-478c-9613-cef5044dadaa-kube-api-access-2vflv" (OuterVolumeSpecName: "kube-api-access-2vflv") pod "ff515b67-05f3-478c-9613-cef5044dadaa" (UID: "ff515b67-05f3-478c-9613-cef5044dadaa"). InnerVolumeSpecName "kube-api-access-2vflv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.479456 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff515b67-05f3-478c-9613-cef5044dadaa" (UID: "ff515b67-05f3-478c-9613-cef5044dadaa"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.479679 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data" (OuterVolumeSpecName: "config-data") pod "ff515b67-05f3-478c-9613-cef5044dadaa" (UID: "ff515b67-05f3-478c-9613-cef5044dadaa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.482070 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff515b67-05f3-478c-9613-cef5044dadaa-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.482108 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vflv\" (UniqueName: \"kubernetes.io/projected/ff515b67-05f3-478c-9613-cef5044dadaa-kube-api-access-2vflv\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.482120 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.482131 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.482139 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff515b67-05f3-478c-9613-cef5044dadaa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.716382 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerStarted","Data":"3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c"} Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.716697 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-central-agent" containerID="cri-o://6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d" gracePeriod=30 Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.716945 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.717230 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="proxy-httpd" containerID="cri-o://3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c" gracePeriod=30 Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.717348 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-notification-agent" containerID="cri-o://38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca" gracePeriod=30 Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.717408 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="sg-core" containerID="cri-o://adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00" gracePeriod=30 Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.738739 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b7bf89895-xstsf" event={"ID":"ff515b67-05f3-478c-9613-cef5044dadaa","Type":"ContainerDied","Data":"7feb82541029621b406f60d64f995bce37370167ed58fdf2f6ced31408237486"} Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.738793 4816 scope.go:117] "RemoveContainer" containerID="22a7346f911dacc5c0c309900a9551f250f0b975730d2b3b5083a38ea7a345ba" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.738860 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6b7bf89895-xstsf" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.746747 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.667145948 podStartE2EDuration="46.746731305s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="2026-02-16 13:22:35.993212742 +0000 UTC m=+1155.319926470" lastFinishedPulling="2026-02-16 13:23:20.072798099 +0000 UTC m=+1199.399511827" observedRunningTime="2026-02-16 13:23:20.743134526 +0000 UTC m=+1200.069848264" watchObservedRunningTime="2026-02-16 13:23:20.746731305 +0000 UTC m=+1200.073445033" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.783724 4816 scope.go:117] "RemoveContainer" containerID="26e8b6aa57a5f3a5980b56ccae6cd4d16e281f02afde6afe01b599e74cc5ae90" Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.796889 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-6b7bf89895-xstsf"] Feb 16 13:23:20 crc kubenswrapper[4816]: I0216 13:23:20.808821 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-6b7bf89895-xstsf"] Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.337003 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.395104 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-lgpxq"] Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.395410 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" podUID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerName="dnsmasq-dns" containerID="cri-o://e8d1274ef3521c1907ffc55521056ccbd0722d4f2e08b68bcfa5cbfba0063961" gracePeriod=10 Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.432310 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" path="/var/lib/kubelet/pods/ff515b67-05f3-478c-9613-cef5044dadaa/volumes" Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.770741 4816 generic.go:334] "Generic (PLEG): container finished" podID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerID="3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c" exitCode=0 Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.770806 4816 generic.go:334] "Generic (PLEG): container finished" podID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerID="adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00" exitCode=2 Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.770817 4816 generic.go:334] "Generic (PLEG): 
container finished" podID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerID="6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d" exitCode=0 Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.770898 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerDied","Data":"3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c"} Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.770953 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerDied","Data":"adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00"} Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.770969 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerDied","Data":"6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d"} Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.773313 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sqvh5" event={"ID":"af6d642c-ddbc-4faa-8871-ad5556ff1a64","Type":"ContainerStarted","Data":"0b08dd3d0fdfaadbe3e28364a7f5534f73e7435d1603256b3b6c48882b298347"} Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.782366 4816 generic.go:334] "Generic (PLEG): container finished" podID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerID="e8d1274ef3521c1907ffc55521056ccbd0722d4f2e08b68bcfa5cbfba0063961" exitCode=0 Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.782498 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" event={"ID":"d9735670-f4f7-4da6-8985-58eba2625c2c","Type":"ContainerDied","Data":"e8d1274ef3521c1907ffc55521056ccbd0722d4f2e08b68bcfa5cbfba0063961"} Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.800203 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-sqvh5" podStartSLOduration=3.755018377 podStartE2EDuration="47.800185275s" podCreationTimestamp="2026-02-16 13:22:34 +0000 UTC" firstStartedPulling="2026-02-16 13:22:35.974187581 +0000 UTC m=+1155.300901309" lastFinishedPulling="2026-02-16 13:23:20.019354479 +0000 UTC m=+1199.346068207" observedRunningTime="2026-02-16 13:23:21.792602308 +0000 UTC m=+1201.119316036" watchObservedRunningTime="2026-02-16 13:23:21.800185275 +0000 UTC m=+1201.126899003" Feb 16 13:23:21 crc kubenswrapper[4816]: I0216 13:23:21.954322 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.048103 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-swift-storage-0\") pod \"d9735670-f4f7-4da6-8985-58eba2625c2c\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.048183 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-svc\") pod \"d9735670-f4f7-4da6-8985-58eba2625c2c\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.048208 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-config\") pod \"d9735670-f4f7-4da6-8985-58eba2625c2c\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.048274 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdl4r\" (UniqueName: \"kubernetes.io/projected/d9735670-f4f7-4da6-8985-58eba2625c2c-kube-api-access-mdl4r\") pod \"d9735670-f4f7-4da6-8985-58eba2625c2c\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.048407 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-nb\") pod \"d9735670-f4f7-4da6-8985-58eba2625c2c\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.048422 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-sb\") pod \"d9735670-f4f7-4da6-8985-58eba2625c2c\" (UID: \"d9735670-f4f7-4da6-8985-58eba2625c2c\") " Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.074892 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9735670-f4f7-4da6-8985-58eba2625c2c-kube-api-access-mdl4r" (OuterVolumeSpecName: "kube-api-access-mdl4r") pod "d9735670-f4f7-4da6-8985-58eba2625c2c" (UID: "d9735670-f4f7-4da6-8985-58eba2625c2c"). InnerVolumeSpecName "kube-api-access-mdl4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.155018 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdl4r\" (UniqueName: \"kubernetes.io/projected/d9735670-f4f7-4da6-8985-58eba2625c2c-kube-api-access-mdl4r\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.155691 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d9735670-f4f7-4da6-8985-58eba2625c2c" (UID: "d9735670-f4f7-4da6-8985-58eba2625c2c"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.158215 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d9735670-f4f7-4da6-8985-58eba2625c2c" (UID: "d9735670-f4f7-4da6-8985-58eba2625c2c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.160996 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d9735670-f4f7-4da6-8985-58eba2625c2c" (UID: "d9735670-f4f7-4da6-8985-58eba2625c2c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.185081 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-config" (OuterVolumeSpecName: "config") pod "d9735670-f4f7-4da6-8985-58eba2625c2c" (UID: "d9735670-f4f7-4da6-8985-58eba2625c2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.191351 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d9735670-f4f7-4da6-8985-58eba2625c2c" (UID: "d9735670-f4f7-4da6-8985-58eba2625c2c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.257621 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.257697 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.257708 4816 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.257728 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.257740 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9735670-f4f7-4da6-8985-58eba2625c2c-config\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.545171 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.675880 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-sg-core-conf-yaml\") pod \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") "
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.675932 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-scripts\") pod \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") "
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.675954 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-run-httpd\") pod \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") "
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.676713 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-combined-ca-bundle\") pod \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") "
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.676510 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "860e76e2-3fc6-4b66-8bb2-2e377153c53b" (UID: "860e76e2-3fc6-4b66-8bb2-2e377153c53b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.676809 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-config-data\") pod \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") "
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.676862 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-log-httpd\") pod \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") "
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.676907 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lj2k5\" (UniqueName: \"kubernetes.io/projected/860e76e2-3fc6-4b66-8bb2-2e377153c53b-kube-api-access-lj2k5\") pod \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\" (UID: \"860e76e2-3fc6-4b66-8bb2-2e377153c53b\") "
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.677574 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.679442 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "860e76e2-3fc6-4b66-8bb2-2e377153c53b" (UID: "860e76e2-3fc6-4b66-8bb2-2e377153c53b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.682863 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-scripts" (OuterVolumeSpecName: "scripts") pod "860e76e2-3fc6-4b66-8bb2-2e377153c53b" (UID: "860e76e2-3fc6-4b66-8bb2-2e377153c53b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.683277 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/860e76e2-3fc6-4b66-8bb2-2e377153c53b-kube-api-access-lj2k5" (OuterVolumeSpecName: "kube-api-access-lj2k5") pod "860e76e2-3fc6-4b66-8bb2-2e377153c53b" (UID: "860e76e2-3fc6-4b66-8bb2-2e377153c53b"). InnerVolumeSpecName "kube-api-access-lj2k5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.710787 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "860e76e2-3fc6-4b66-8bb2-2e377153c53b" (UID: "860e76e2-3fc6-4b66-8bb2-2e377153c53b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.777380 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "860e76e2-3fc6-4b66-8bb2-2e377153c53b" (UID: "860e76e2-3fc6-4b66-8bb2-2e377153c53b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.779787 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/860e76e2-3fc6-4b66-8bb2-2e377153c53b-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.779807 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lj2k5\" (UniqueName: \"kubernetes.io/projected/860e76e2-3fc6-4b66-8bb2-2e377153c53b-kube-api-access-lj2k5\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.779819 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.779828 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.779836 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.796680 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-config-data" (OuterVolumeSpecName: "config-data") pod "860e76e2-3fc6-4b66-8bb2-2e377153c53b" (UID: "860e76e2-3fc6-4b66-8bb2-2e377153c53b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.806474 4816 generic.go:334] "Generic (PLEG): container finished" podID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerID="38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca" exitCode=0
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.806554 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerDied","Data":"38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca"}
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.806585 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"860e76e2-3fc6-4b66-8bb2-2e377153c53b","Type":"ContainerDied","Data":"b4cbf23e42f1c15a0d0273c776d4b9288a4fa85b86f700881a3b7530c658f545"}
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.806603 4816 scope.go:117] "RemoveContainer" containerID="3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.806757 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.814639 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq" event={"ID":"d9735670-f4f7-4da6-8985-58eba2625c2c","Type":"ContainerDied","Data":"4f7ae28b84513960e14b7d97ed9bfa82b7bee59a6d07a3226de2820d90dd5743"}
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.814698 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-lgpxq"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.843912 4816 scope.go:117] "RemoveContainer" containerID="adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.864531 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.876943 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.881856 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/860e76e2-3fc6-4b66-8bb2-2e377153c53b-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.882241 4816 scope.go:117] "RemoveContainer" containerID="38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.886831 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-lgpxq"]
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.901163 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-lgpxq"]
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.908859 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909534 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerName="dnsmasq-dns"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909555 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerName="dnsmasq-dns"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909573 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerName="init"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909580 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerName="init"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909596 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker-log"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909602 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker-log"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909631 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-notification-agent"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909637 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-notification-agent"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909669 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="proxy-httpd"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909676 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="proxy-httpd"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909687 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-central-agent"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909694 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-central-agent"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909715 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="sg-core"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909721 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="sg-core"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.909730 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909736 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909900 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker-log"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909916 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="sg-core"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909929 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff515b67-05f3-478c-9613-cef5044dadaa" containerName="barbican-worker"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909938 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="proxy-httpd"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909948 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-central-agent"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909963 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9735670-f4f7-4da6-8985-58eba2625c2c" containerName="dnsmasq-dns"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.909974 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" containerName="ceilometer-notification-agent"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.911512 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.915874 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.917151 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.917605 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.917866 4816 scope.go:117] "RemoveContainer" containerID="6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.957365 4816 scope.go:117] "RemoveContainer" containerID="3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.959367 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c\": container with ID starting with 3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c not found: ID does not exist" containerID="3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.959404 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c"} err="failed to get container status \"3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c\": rpc error: code = NotFound desc = could not find container \"3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c\": container with ID starting with 3fe98d0b85025388f48a84e506ed75a23ee92a36e4e9db0843937728e06d4b6c not found: ID does not exist"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.959425 4816 scope.go:117] "RemoveContainer" containerID="adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.959739 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00\": container with ID starting with adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00 not found: ID does not exist" containerID="adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.959771 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00"} err="failed to get container status \"adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00\": rpc error: code = NotFound desc = could not find container \"adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00\": container with ID starting with adec7a45c2c07186cfd25ca791ecdc9ff52a6172426b29711cae8a491da83c00 not found: ID does not exist"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.959792 4816 scope.go:117] "RemoveContainer" containerID="38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.960090 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca\": container with ID starting with 38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca not found: ID does not exist" containerID="38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.960112 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca"} err="failed to get container status \"38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca\": rpc error: code = NotFound desc = could not find container \"38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca\": container with ID starting with 38efbad4d226d3673ba71f5c9a6ad8296eb5a453118a5163eb2c95039da98bca not found: ID does not exist"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.960124 4816 scope.go:117] "RemoveContainer" containerID="6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d"
Feb 16 13:23:22 crc kubenswrapper[4816]: E0216 13:23:22.960315 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d\": container with ID starting with 6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d not found: ID does not exist" containerID="6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.960369 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d"} err="failed to get container status \"6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d\": rpc error: code = NotFound desc = could not find container \"6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d\": container with ID starting with 6753cb4830654901df43079777002c76b0acbfeb577b4910864e2868e841403d not found: ID does not exist"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.960384 4816 scope.go:117] "RemoveContainer" containerID="e8d1274ef3521c1907ffc55521056ccbd0722d4f2e08b68bcfa5cbfba0063961"
Feb 16 13:23:22 crc kubenswrapper[4816]: I0216 13:23:22.998720 4816 scope.go:117] "RemoveContainer" containerID="3156059089ec21e28a45ad80fc0007678973725b9d2a881f436cab57f3835c22"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.089036 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.089085 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-872ks\" (UniqueName: \"kubernetes.io/projected/ad6c02a5-54f8-43d0-9526-5933be302a31-kube-api-access-872ks\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.089154 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-log-httpd\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.089203 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-scripts\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.089255 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.089290 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-run-httpd\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.089343 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-config-data\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.191087 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.191238 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-run-httpd\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.191328 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-config-data\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.191355 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.191422 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-872ks\" (UniqueName: \"kubernetes.io/projected/ad6c02a5-54f8-43d0-9526-5933be302a31-kube-api-access-872ks\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.191475 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-log-httpd\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.191575 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-scripts\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.198252 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-run-httpd\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.198893 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.199373 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-log-httpd\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.199578 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.204494 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-config-data\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.206760 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-scripts\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.223507 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-872ks\" (UniqueName: \"kubernetes.io/projected/ad6c02a5-54f8-43d0-9526-5933be302a31-kube-api-access-872ks\") pod \"ceilometer-0\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.228109 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.415678 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="860e76e2-3fc6-4b66-8bb2-2e377153c53b" path="/var/lib/kubelet/pods/860e76e2-3fc6-4b66-8bb2-2e377153c53b/volumes"
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.416618 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9735670-f4f7-4da6-8985-58eba2625c2c" path="/var/lib/kubelet/pods/d9735670-f4f7-4da6-8985-58eba2625c2c/volumes"
Feb 16 13:23:23 crc kubenswrapper[4816]: W0216 13:23:23.714607 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad6c02a5_54f8_43d0_9526_5933be302a31.slice/crio-b3f1e83fa46d92886eb8e11af3dc4eaf2e6850c26d4cad038bcaf5c7ead981cf WatchSource:0}: Error finding container b3f1e83fa46d92886eb8e11af3dc4eaf2e6850c26d4cad038bcaf5c7ead981cf: Status 404 returned error can't find the container with id b3f1e83fa46d92886eb8e11af3dc4eaf2e6850c26d4cad038bcaf5c7ead981cf
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.727503 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 13:23:23 crc kubenswrapper[4816]: I0216 13:23:23.827380 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerStarted","Data":"b3f1e83fa46d92886eb8e11af3dc4eaf2e6850c26d4cad038bcaf5c7ead981cf"}
Feb 16 13:23:24 crc kubenswrapper[4816]: I0216 13:23:24.379398 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5b75bc5544-lb94h"
Feb 16 13:23:24 crc kubenswrapper[4816]: I0216 13:23:24.380978 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5b75bc5544-lb94h"
Feb 16 13:23:24 crc kubenswrapper[4816]: I0216 13:23:24.473453 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7dd6997456-7z4ck"]
Feb 16 13:23:24 crc kubenswrapper[4816]: I0216 13:23:24.473919 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7dd6997456-7z4ck" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api-log" containerID="cri-o://332aaccdf8e9202da5c9d1efd02710688b01fcfcafd2c2088b80968af072484c" gracePeriod=30
Feb 16 13:23:24 crc kubenswrapper[4816]: I0216 13:23:24.474065 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7dd6997456-7z4ck" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api" containerID="cri-o://073f1f1add3cf51e4a0fe170d3f3ff0fb0431f417c70a7b522e6b526f0b43f40" gracePeriod=30
Feb 16 13:23:24 crc kubenswrapper[4816]: I0216 13:23:24.845006 4816 generic.go:334] "Generic (PLEG): container finished" podID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerID="332aaccdf8e9202da5c9d1efd02710688b01fcfcafd2c2088b80968af072484c" exitCode=143
Feb 16 13:23:24 crc kubenswrapper[4816]: I0216 13:23:24.845123 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7dd6997456-7z4ck" event={"ID":"3674313a-95cf-4c2c-b15b-39045c4ad09d","Type":"ContainerDied","Data":"332aaccdf8e9202da5c9d1efd02710688b01fcfcafd2c2088b80968af072484c"}
Feb 16 13:23:25 crc kubenswrapper[4816]: I0216 13:23:25.860102 4816 generic.go:334] "Generic (PLEG): container finished" podID="af6d642c-ddbc-4faa-8871-ad5556ff1a64" containerID="0b08dd3d0fdfaadbe3e28364a7f5534f73e7435d1603256b3b6c48882b298347" exitCode=0
Feb 16 13:23:25 crc kubenswrapper[4816]: I0216 13:23:25.860213 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sqvh5" event={"ID":"af6d642c-ddbc-4faa-8871-ad5556ff1a64","Type":"ContainerDied","Data":"0b08dd3d0fdfaadbe3e28364a7f5534f73e7435d1603256b3b6c48882b298347"}
Feb 16 13:23:25 crc kubenswrapper[4816]: I0216 13:23:25.866883 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerStarted","Data":"17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6"}
Feb 16 13:23:25 crc kubenswrapper[4816]: I0216 13:23:25.866953 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerStarted","Data":"4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325"}
Feb 16 13:23:26 crc kubenswrapper[4816]: I0216 13:23:26.878900 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerStarted","Data":"625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db"}
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.275030 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sqvh5"
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.477361 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-db-sync-config-data\") pod \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") "
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.477564 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgcb2\" (UniqueName: \"kubernetes.io/projected/af6d642c-ddbc-4faa-8871-ad5556ff1a64-kube-api-access-rgcb2\") pod \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") "
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.477603 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-scripts\") pod \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") "
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.477724 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-config-data\") pod \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") "
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.477771 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af6d642c-ddbc-4faa-8871-ad5556ff1a64-etc-machine-id\") pod \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") "
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.478047 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-combined-ca-bundle\") pod \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\" (UID: \"af6d642c-ddbc-4faa-8871-ad5556ff1a64\") "
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.478138 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af6d642c-ddbc-4faa-8871-ad5556ff1a64-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "af6d642c-ddbc-4faa-8871-ad5556ff1a64" (UID: "af6d642c-ddbc-4faa-8871-ad5556ff1a64"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.479731 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/af6d642c-ddbc-4faa-8871-ad5556ff1a64-etc-machine-id\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.485412 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-scripts" (OuterVolumeSpecName: "scripts") pod "af6d642c-ddbc-4faa-8871-ad5556ff1a64" (UID: "af6d642c-ddbc-4faa-8871-ad5556ff1a64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.487020 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "af6d642c-ddbc-4faa-8871-ad5556ff1a64" (UID: "af6d642c-ddbc-4faa-8871-ad5556ff1a64"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.498883 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af6d642c-ddbc-4faa-8871-ad5556ff1a64-kube-api-access-rgcb2" (OuterVolumeSpecName: "kube-api-access-rgcb2") pod "af6d642c-ddbc-4faa-8871-ad5556ff1a64" (UID: "af6d642c-ddbc-4faa-8871-ad5556ff1a64"). InnerVolumeSpecName "kube-api-access-rgcb2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.524589 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af6d642c-ddbc-4faa-8871-ad5556ff1a64" (UID: "af6d642c-ddbc-4faa-8871-ad5556ff1a64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.558137 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-config-data" (OuterVolumeSpecName: "config-data") pod "af6d642c-ddbc-4faa-8871-ad5556ff1a64" (UID: "af6d642c-ddbc-4faa-8871-ad5556ff1a64"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.585127 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgcb2\" (UniqueName: \"kubernetes.io/projected/af6d642c-ddbc-4faa-8871-ad5556ff1a64-kube-api-access-rgcb2\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.585197 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.585218 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.585236 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.585256 4816 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/af6d642c-ddbc-4faa-8871-ad5556ff1a64-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.644156 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7dd6997456-7z4ck" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:38888->10.217.0.158:9311: read: connection reset by peer"
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.644497 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7dd6997456-7z4ck" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:38886->10.217.0.158:9311: read: connection reset by peer"
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.913735 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sqvh5" event={"ID":"af6d642c-ddbc-4faa-8871-ad5556ff1a64","Type":"ContainerDied","Data":"09afed434d231bdcb0c85436275e7373b88bca26134aa26d1a699b1f49d1709e"}
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.914063 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09afed434d231bdcb0c85436275e7373b88bca26134aa26d1a699b1f49d1709e"
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.913745 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sqvh5"
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.938379 4816 generic.go:334] "Generic (PLEG): container finished" podID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerID="073f1f1add3cf51e4a0fe170d3f3ff0fb0431f417c70a7b522e6b526f0b43f40" exitCode=0
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.938448 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7dd6997456-7z4ck" event={"ID":"3674313a-95cf-4c2c-b15b-39045c4ad09d","Type":"ContainerDied","Data":"073f1f1add3cf51e4a0fe170d3f3ff0fb0431f417c70a7b522e6b526f0b43f40"}
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.943496 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerStarted","Data":"d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac"}
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.943736 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 16 13:23:27 crc kubenswrapper[4816]: I0216 13:23:27.972881 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.148115767 podStartE2EDuration="5.972862103s" podCreationTimestamp="2026-02-16 13:23:22 +0000 UTC" firstStartedPulling="2026-02-16 13:23:23.716799138 +0000 UTC m=+1203.043512876" lastFinishedPulling="2026-02-16 13:23:27.541545464 +0000 UTC m=+1206.868259212" observedRunningTime="2026-02-16 13:23:27.963734234 +0000 UTC m=+1207.290447962" watchObservedRunningTime="2026-02-16 13:23:27.972862103 +0000 UTC m=+1207.299575841"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.019421 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7dd6997456-7z4ck"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.095535 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data-custom\") pod \"3674313a-95cf-4c2c-b15b-39045c4ad09d\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") "
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.095583 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5llf\" (UniqueName: \"kubernetes.io/projected/3674313a-95cf-4c2c-b15b-39045c4ad09d-kube-api-access-l5llf\") pod \"3674313a-95cf-4c2c-b15b-39045c4ad09d\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") "
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.095623 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-combined-ca-bundle\") pod \"3674313a-95cf-4c2c-b15b-39045c4ad09d\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") "
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.095672 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data\") pod \"3674313a-95cf-4c2c-b15b-39045c4ad09d\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") "
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.095861 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3674313a-95cf-4c2c-b15b-39045c4ad09d-logs\") pod \"3674313a-95cf-4c2c-b15b-39045c4ad09d\" (UID: \"3674313a-95cf-4c2c-b15b-39045c4ad09d\") "
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.096761 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3674313a-95cf-4c2c-b15b-39045c4ad09d-logs" (OuterVolumeSpecName: "logs") pod "3674313a-95cf-4c2c-b15b-39045c4ad09d" (UID: "3674313a-95cf-4c2c-b15b-39045c4ad09d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.105610 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3674313a-95cf-4c2c-b15b-39045c4ad09d" (UID: "3674313a-95cf-4c2c-b15b-39045c4ad09d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.112164 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3674313a-95cf-4c2c-b15b-39045c4ad09d-kube-api-access-l5llf" (OuterVolumeSpecName: "kube-api-access-l5llf") pod "3674313a-95cf-4c2c-b15b-39045c4ad09d" (UID: "3674313a-95cf-4c2c-b15b-39045c4ad09d"). InnerVolumeSpecName "kube-api-access-l5llf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.149639 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3674313a-95cf-4c2c-b15b-39045c4ad09d" (UID: "3674313a-95cf-4c2c-b15b-39045c4ad09d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.201638 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3674313a-95cf-4c2c-b15b-39045c4ad09d-logs\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.201678 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data-custom\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.201689 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5llf\" (UniqueName: \"kubernetes.io/projected/3674313a-95cf-4c2c-b15b-39045c4ad09d-kube-api-access-l5llf\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.201698 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.207541 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data" (OuterVolumeSpecName: "config-data") pod "3674313a-95cf-4c2c-b15b-39045c4ad09d" (UID: "3674313a-95cf-4c2c-b15b-39045c4ad09d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.238749 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 16 13:23:28 crc kubenswrapper[4816]: E0216 13:23:28.240341 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6d642c-ddbc-4faa-8871-ad5556ff1a64" containerName="cinder-db-sync"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.240368 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6d642c-ddbc-4faa-8871-ad5556ff1a64" containerName="cinder-db-sync"
Feb 16 13:23:28 crc kubenswrapper[4816]: E0216 13:23:28.240393 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.240403 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api"
Feb 16 13:23:28 crc kubenswrapper[4816]: E0216 13:23:28.240539 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api-log"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.240551 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api-log"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.241030 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api-log"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.241050 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="af6d642c-ddbc-4faa-8871-ad5556ff1a64" containerName="cinder-db-sync"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.241092 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" containerName="barbican-api"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.244203 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.247193 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.247401 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.247548 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.247726 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-6vjhh"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.255109 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.299851 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-b24bk"]
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.301638 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.308729 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.308798 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.308842 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.308870 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-scripts\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.308896 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpvvb\" (UniqueName: \"kubernetes.io/projected/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-kube-api-access-wpvvb\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.308979 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.309115 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3674313a-95cf-4c2c-b15b-39045c4ad09d-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.335337 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-b24bk"]
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.411954 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-config\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412090 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412122 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxmtq\" (UniqueName: \"kubernetes.io/projected/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-kube-api-access-jxmtq\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412219 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412254 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412393 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412435 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-scripts\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412459 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412483 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpvvb\" (UniqueName: \"kubernetes.io/projected/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-kube-api-access-wpvvb\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412513 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412532 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.412635 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.418251 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.418713 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.420563 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.431644 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-scripts\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.432321 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpvvb\" (UniqueName: \"kubernetes.io/projected/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-kube-api-access-wpvvb\") pod \"cinder-scheduler-0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.514496 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.514562 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.514628 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-config\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.514736 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxmtq\" (UniqueName: \"kubernetes.io/projected/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-kube-api-access-jxmtq\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.514877 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.514907 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.515543 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.520080 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-config\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.520086 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.520633 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.521366 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.557525 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxmtq\" (UniqueName: \"kubernetes.io/projected/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-kube-api-access-jxmtq\") pod \"dnsmasq-dns-6bb4fc677f-b24bk\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.601519 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.603565 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.613780 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.639320 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.644621 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.663397 4816 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.729581 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.729697 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f86b7a07-1fb1-4670-852a-637ae4c620b9-logs\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.729762 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g8k9\" (UniqueName: \"kubernetes.io/projected/f86b7a07-1fb1-4670-852a-637ae4c620b9-kube-api-access-9g8k9\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.729784 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.729805 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data-custom\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.729913 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f86b7a07-1fb1-4670-852a-637ae4c620b9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.729932 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-scripts\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.835709 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.835783 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f86b7a07-1fb1-4670-852a-637ae4c620b9-logs\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.835823 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g8k9\" (UniqueName: 
\"kubernetes.io/projected/f86b7a07-1fb1-4670-852a-637ae4c620b9-kube-api-access-9g8k9\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.835844 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.836172 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data-custom\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.836234 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f86b7a07-1fb1-4670-852a-637ae4c620b9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.836249 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-scripts\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.837933 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f86b7a07-1fb1-4670-852a-637ae4c620b9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.840357 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f86b7a07-1fb1-4670-852a-637ae4c620b9-logs\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.841562 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.845396 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.846092 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-scripts\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.868907 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data-custom\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.881968 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g8k9\" (UniqueName: \"kubernetes.io/projected/f86b7a07-1fb1-4670-852a-637ae4c620b9-kube-api-access-9g8k9\") pod \"cinder-api-0\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " pod="openstack/cinder-api-0" Feb 16 13:23:28 crc kubenswrapper[4816]: I0216 13:23:28.951985 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.006903 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7dd6997456-7z4ck" Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.017280 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7dd6997456-7z4ck" event={"ID":"3674313a-95cf-4c2c-b15b-39045c4ad09d","Type":"ContainerDied","Data":"b4a93dd37996291f27020e4a38c9351923ead5d4e66b87bcd631433e38cfd636"} Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.017376 4816 scope.go:117] "RemoveContainer" containerID="073f1f1add3cf51e4a0fe170d3f3ff0fb0431f417c70a7b522e6b526f0b43f40" Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.064950 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7dd6997456-7z4ck"] Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.075039 4816 scope.go:117] "RemoveContainer" containerID="332aaccdf8e9202da5c9d1efd02710688b01fcfcafd2c2088b80968af072484c" Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.094283 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7dd6997456-7z4ck"] Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.282197 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.420421 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3674313a-95cf-4c2c-b15b-39045c4ad09d" path="/var/lib/kubelet/pods/3674313a-95cf-4c2c-b15b-39045c4ad09d/volumes" Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.421112 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-b24bk"] Feb 16 13:23:29 crc kubenswrapper[4816]: I0216 13:23:29.557060 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:23:30 crc kubenswrapper[4816]: I0216 13:23:30.041299 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f86b7a07-1fb1-4670-852a-637ae4c620b9","Type":"ContainerStarted","Data":"d09dabff5a7facad8c6c811f12431552d77beecd469ba7e51ce7d5a465607da7"} Feb 16 13:23:30 crc kubenswrapper[4816]: I0216 13:23:30.043195 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0","Type":"ContainerStarted","Data":"690077c3cdb41f1c26fcf6a07e7f213e3ac043b5dcf12b9ca6d81b1c6a5ba45f"} Feb 16 13:23:30 crc kubenswrapper[4816]: I0216 13:23:30.070837 4816 generic.go:334] "Generic (PLEG): container finished" podID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerID="c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2" exitCode=0 Feb 16 13:23:30 crc kubenswrapper[4816]: I0216 13:23:30.070890 4816 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" event={"ID":"e1c78c8f-f023-4a68-b9ce-52b09090a1e2","Type":"ContainerDied","Data":"c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2"} Feb 16 13:23:30 crc kubenswrapper[4816]: I0216 13:23:30.070916 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" event={"ID":"e1c78c8f-f023-4a68-b9ce-52b09090a1e2","Type":"ContainerStarted","Data":"11612a57ba2235e3d89c6c8f3507138f19691239dfcff7fda51eccf749173941"} Feb 16 13:23:30 crc kubenswrapper[4816]: I0216 13:23:30.889920 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:23:31 crc kubenswrapper[4816]: I0216 13:23:31.099723 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f86b7a07-1fb1-4670-852a-637ae4c620b9","Type":"ContainerStarted","Data":"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a"} Feb 16 13:23:31 crc kubenswrapper[4816]: I0216 13:23:31.104784 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0","Type":"ContainerStarted","Data":"07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9"} Feb 16 13:23:31 crc kubenswrapper[4816]: I0216 13:23:31.112386 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" event={"ID":"e1c78c8f-f023-4a68-b9ce-52b09090a1e2","Type":"ContainerStarted","Data":"310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52"} Feb 16 13:23:31 crc kubenswrapper[4816]: I0216 13:23:31.112568 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" Feb 16 13:23:31 crc kubenswrapper[4816]: I0216 13:23:31.135289 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" podStartSLOduration=3.13526763 podStartE2EDuration="3.13526763s" podCreationTimestamp="2026-02-16 13:23:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:31.128955818 +0000 UTC m=+1210.455669566" watchObservedRunningTime="2026-02-16 13:23:31.13526763 +0000 UTC m=+1210.461981368" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.170125 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f86b7a07-1fb1-4670-852a-637ae4c620b9","Type":"ContainerStarted","Data":"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2"} Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.170487 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.170328 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api" containerID="cri-o://5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2" gracePeriod=30 Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.170269 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api-log" containerID="cri-o://d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a" gracePeriod=30 Feb 16 13:23:32 crc 
kubenswrapper[4816]: I0216 13:23:32.172379 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0","Type":"ContainerStarted","Data":"33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb"} Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.198419 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.198396585 podStartE2EDuration="4.198396585s" podCreationTimestamp="2026-02-16 13:23:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:32.195690841 +0000 UTC m=+1211.522404569" watchObservedRunningTime="2026-02-16 13:23:32.198396585 +0000 UTC m=+1211.525110323" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.223419 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.451898447 podStartE2EDuration="4.223401628s" podCreationTimestamp="2026-02-16 13:23:28 +0000 UTC" firstStartedPulling="2026-02-16 13:23:29.298320532 +0000 UTC m=+1208.625034260" lastFinishedPulling="2026-02-16 13:23:30.069823713 +0000 UTC m=+1209.396537441" observedRunningTime="2026-02-16 13:23:32.216393567 +0000 UTC m=+1211.543107305" watchObservedRunningTime="2026-02-16 13:23:32.223401628 +0000 UTC m=+1211.550115356" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.792674 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973026 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data\") pod \"f86b7a07-1fb1-4670-852a-637ae4c620b9\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973083 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-scripts\") pod \"f86b7a07-1fb1-4670-852a-637ae4c620b9\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973118 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f86b7a07-1fb1-4670-852a-637ae4c620b9-etc-machine-id\") pod \"f86b7a07-1fb1-4670-852a-637ae4c620b9\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973160 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-combined-ca-bundle\") pod \"f86b7a07-1fb1-4670-852a-637ae4c620b9\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973200 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f86b7a07-1fb1-4670-852a-637ae4c620b9-logs\") pod \"f86b7a07-1fb1-4670-852a-637ae4c620b9\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973242 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/f86b7a07-1fb1-4670-852a-637ae4c620b9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f86b7a07-1fb1-4670-852a-637ae4c620b9" (UID: "f86b7a07-1fb1-4670-852a-637ae4c620b9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973367 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data-custom\") pod \"f86b7a07-1fb1-4670-852a-637ae4c620b9\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973509 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9g8k9\" (UniqueName: \"kubernetes.io/projected/f86b7a07-1fb1-4670-852a-637ae4c620b9-kube-api-access-9g8k9\") pod \"f86b7a07-1fb1-4670-852a-637ae4c620b9\" (UID: \"f86b7a07-1fb1-4670-852a-637ae4c620b9\") " Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.973759 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f86b7a07-1fb1-4670-852a-637ae4c620b9-logs" (OuterVolumeSpecName: "logs") pod "f86b7a07-1fb1-4670-852a-637ae4c620b9" (UID: "f86b7a07-1fb1-4670-852a-637ae4c620b9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.974094 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f86b7a07-1fb1-4670-852a-637ae4c620b9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.974130 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f86b7a07-1fb1-4670-852a-637ae4c620b9-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.981816 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f86b7a07-1fb1-4670-852a-637ae4c620b9" (UID: "f86b7a07-1fb1-4670-852a-637ae4c620b9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:32 crc kubenswrapper[4816]: I0216 13:23:32.981885 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f86b7a07-1fb1-4670-852a-637ae4c620b9-kube-api-access-9g8k9" (OuterVolumeSpecName: "kube-api-access-9g8k9") pod "f86b7a07-1fb1-4670-852a-637ae4c620b9" (UID: "f86b7a07-1fb1-4670-852a-637ae4c620b9"). InnerVolumeSpecName "kube-api-access-9g8k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.003882 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-scripts" (OuterVolumeSpecName: "scripts") pod "f86b7a07-1fb1-4670-852a-637ae4c620b9" (UID: "f86b7a07-1fb1-4670-852a-637ae4c620b9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.025158 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f86b7a07-1fb1-4670-852a-637ae4c620b9" (UID: "f86b7a07-1fb1-4670-852a-637ae4c620b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.042470 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data" (OuterVolumeSpecName: "config-data") pod "f86b7a07-1fb1-4670-852a-637ae4c620b9" (UID: "f86b7a07-1fb1-4670-852a-637ae4c620b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.076148 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.076178 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.076187 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.076198 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f86b7a07-1fb1-4670-852a-637ae4c620b9-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.076206 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9g8k9\" (UniqueName: \"kubernetes.io/projected/f86b7a07-1fb1-4670-852a-637ae4c620b9-kube-api-access-9g8k9\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.185821 4816 generic.go:334] "Generic (PLEG): container finished" podID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerID="5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2" exitCode=0 Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.185854 4816 generic.go:334] "Generic (PLEG): container finished" podID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerID="d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a" exitCode=143 Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.185915 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.185907 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f86b7a07-1fb1-4670-852a-637ae4c620b9","Type":"ContainerDied","Data":"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2"} Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.185976 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f86b7a07-1fb1-4670-852a-637ae4c620b9","Type":"ContainerDied","Data":"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a"} Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.185993 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f86b7a07-1fb1-4670-852a-637ae4c620b9","Type":"ContainerDied","Data":"d09dabff5a7facad8c6c811f12431552d77beecd469ba7e51ce7d5a465607da7"} Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.186015 4816 scope.go:117] "RemoveContainer" containerID="5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.213288 4816 scope.go:117] "RemoveContainer" containerID="d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.222693 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.234808 4816 scope.go:117] "RemoveContainer" containerID="5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2" Feb 16 13:23:33 crc kubenswrapper[4816]: E0216 13:23:33.235393 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2\": container with ID starting with 5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2 not found: ID does not exist" containerID="5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.235453 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2"} err="failed to get container status \"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2\": rpc error: code = NotFound desc = could not find container \"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2\": container with ID starting with 5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2 not found: ID does not exist" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.235484 4816 scope.go:117] "RemoveContainer" containerID="d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a" Feb 16 13:23:33 crc kubenswrapper[4816]: E0216 13:23:33.235839 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a\": container with ID starting with d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a not found: ID does not exist" containerID="d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.235881 4816 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a"} err="failed to get container status \"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a\": rpc error: code = NotFound desc = could not find container \"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a\": container with ID starting with d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a not found: ID does not exist" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.235908 4816 scope.go:117] "RemoveContainer" containerID="5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.237580 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2"} err="failed to get container status \"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2\": rpc error: code = NotFound desc = could not find container \"5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2\": container with ID starting with 5bd074ccda5be1388e8984764653120024707f6331aaef291e6741d39c44bed2 not found: ID does not exist" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.237604 4816 scope.go:117] "RemoveContainer" containerID="d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.237871 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a"} err="failed to get container status \"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a\": rpc error: code = NotFound desc = could not find container \"d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a\": container with ID starting with d260016bf6e1bffcb8a87be35411a01675cf3706cc03d3971c32061b6f04fa2a not found: ID does not exist" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.238398 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.250683 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:23:33 crc kubenswrapper[4816]: E0216 13:23:33.251206 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.251230 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api" Feb 16 13:23:33 crc kubenswrapper[4816]: E0216 13:23:33.251251 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api-log" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.251260 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api-log" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.251506 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.251542 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" containerName="cinder-api-log" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.252837 4816 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.255895 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.257055 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.257288 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.258346 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282034 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-scripts\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282120 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data-custom\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282230 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/975a9d1d-44d0-4b11-8a41-8f237da1ad85-logs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282259 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282304 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-public-tls-certs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282336 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282357 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282372 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg2zl\" (UniqueName: 
\"kubernetes.io/projected/975a9d1d-44d0-4b11-8a41-8f237da1ad85-kube-api-access-jg2zl\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.282393 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/975a9d1d-44d0-4b11-8a41-8f237da1ad85-etc-machine-id\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384444 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-public-tls-certs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384517 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384549 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384572 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg2zl\" (UniqueName: \"kubernetes.io/projected/975a9d1d-44d0-4b11-8a41-8f237da1ad85-kube-api-access-jg2zl\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384600 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/975a9d1d-44d0-4b11-8a41-8f237da1ad85-etc-machine-id\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384675 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-scripts\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384722 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data-custom\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384802 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/975a9d1d-44d0-4b11-8a41-8f237da1ad85-logs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.384842 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.385069 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/975a9d1d-44d0-4b11-8a41-8f237da1ad85-etc-machine-id\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.385522 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/975a9d1d-44d0-4b11-8a41-8f237da1ad85-logs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.390021 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.390257 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-scripts\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.391373 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-public-tls-certs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.391871 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.393067 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.402869 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg2zl\" (UniqueName: \"kubernetes.io/projected/975a9d1d-44d0-4b11-8a41-8f237da1ad85-kube-api-access-jg2zl\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.403203 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data-custom\") pod \"cinder-api-0\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.410281 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f86b7a07-1fb1-4670-852a-637ae4c620b9" 
path="/var/lib/kubelet/pods/f86b7a07-1fb1-4670-852a-637ae4c620b9/volumes" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.575418 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 13:23:33 crc kubenswrapper[4816]: I0216 13:23:33.645598 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 16 13:23:34 crc kubenswrapper[4816]: W0216 13:23:34.048502 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod975a9d1d_44d0_4b11_8a41_8f237da1ad85.slice/crio-5184f1623c127756f239d67fca1a99d1f2ef05617eb57a75de180cf64199386a WatchSource:0}: Error finding container 5184f1623c127756f239d67fca1a99d1f2ef05617eb57a75de180cf64199386a: Status 404 returned error can't find the container with id 5184f1623c127756f239d67fca1a99d1f2ef05617eb57a75de180cf64199386a Feb 16 13:23:34 crc kubenswrapper[4816]: I0216 13:23:34.051562 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:23:34 crc kubenswrapper[4816]: I0216 13:23:34.197024 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"975a9d1d-44d0-4b11-8a41-8f237da1ad85","Type":"ContainerStarted","Data":"5184f1623c127756f239d67fca1a99d1f2ef05617eb57a75de180cf64199386a"} Feb 16 13:23:35 crc kubenswrapper[4816]: I0216 13:23:35.077412 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-bf8ff5468-dcb5b" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-httpd" probeResult="failure" output="Get \"http://10.217.0.148:9696/\": dial tcp 10.217.0.148:9696: connect: connection refused" Feb 16 13:23:35 crc kubenswrapper[4816]: I0216 13:23:35.214402 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"975a9d1d-44d0-4b11-8a41-8f237da1ad85","Type":"ContainerStarted","Data":"e1555ec2ef7c331226bf5f3c9dd304c9a719ac9687ba3e0531c29bd7c838c76b"} Feb 16 13:23:35 crc kubenswrapper[4816]: I0216 13:23:35.214622 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 16 13:23:35 crc kubenswrapper[4816]: I0216 13:23:35.255483 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.255457135 podStartE2EDuration="2.255457135s" podCreationTimestamp="2026-02-16 13:23:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:35.252870994 +0000 UTC m=+1214.579584732" watchObservedRunningTime="2026-02-16 13:23:35.255457135 +0000 UTC m=+1214.582170873" Feb 16 13:23:36 crc kubenswrapper[4816]: I0216 13:23:36.227038 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"975a9d1d-44d0-4b11-8a41-8f237da1ad85","Type":"ContainerStarted","Data":"b4a948c900f30d0434262ce037301027d88ee90b747aed9a2dc7d36fc8c7b454"} Feb 16 13:23:36 crc kubenswrapper[4816]: I0216 13:23:36.940624 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:23:36 crc kubenswrapper[4816]: I0216 13:23:36.940692 4816 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:23:37 crc kubenswrapper[4816]: I0216 13:23:37.103279 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:37 crc kubenswrapper[4816]: I0216 13:23:37.136162 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:37 crc kubenswrapper[4816]: I0216 13:23:37.360773 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:37 crc kubenswrapper[4816]: I0216 13:23:37.883896 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:37 crc kubenswrapper[4816]: I0216 13:23:37.885011 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:23:37 crc kubenswrapper[4816]: I0216 13:23:37.979655 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-55b8c8fc68-7vq25"] Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.252397 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-55b8c8fc68-7vq25" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-log" containerID="cri-o://5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1" gracePeriod=30 Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.252487 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-55b8c8fc68-7vq25" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-api" containerID="cri-o://8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8" gracePeriod=30 Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.538074 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.665831 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.728961 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-6g2pf"] Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.729243 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" podUID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" containerName="dnsmasq-dns" containerID="cri-o://df32c655695662af3f4785beadfd5108fbf95a04faa2884f5decc908c0f8b4c1" gracePeriod=10 Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.934241 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 16 13:23:38 crc kubenswrapper[4816]: I0216 13:23:38.997251 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.178136 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.254148 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/neutron-6487d4d9b-cbp92"] Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.254403 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6487d4d9b-cbp92" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-api" containerID="cri-o://403d7d0e05eebee102b2d2a528727671b1ddf72a2a83a41898051db48d730e96" gracePeriod=30 Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.254805 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6487d4d9b-cbp92" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-httpd" containerID="cri-o://4c6c7ae0ae2a2aaf24f32900cba79dfeed3ae6f7ea3a213eed8d31fe5f2e53fd" gracePeriod=30 Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.281551 4816 generic.go:334] "Generic (PLEG): container finished" podID="1c830726-778d-4471-98f0-abe404146440" containerID="5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1" exitCode=143 Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.281655 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55b8c8fc68-7vq25" event={"ID":"1c830726-778d-4471-98f0-abe404146440","Type":"ContainerDied","Data":"5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1"} Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.287401 4816 generic.go:334] "Generic (PLEG): container finished" podID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" containerID="df32c655695662af3f4785beadfd5108fbf95a04faa2884f5decc908c0f8b4c1" exitCode=0 Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.287470 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" event={"ID":"3a864762-5560-46b5-86ef-1ad6dd3adfa7","Type":"ContainerDied","Data":"df32c655695662af3f4785beadfd5108fbf95a04faa2884f5decc908c0f8b4c1"} Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.287522 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" event={"ID":"3a864762-5560-46b5-86ef-1ad6dd3adfa7","Type":"ContainerDied","Data":"4b6b0ecb9df21741fb767f0d59948463e2109d7b931081e851954d0f93f38dde"} Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.287533 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b6b0ecb9df21741fb767f0d59948463e2109d7b931081e851954d0f93f38dde" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.287691 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="probe" containerID="cri-o://33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb" gracePeriod=30 Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.287840 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="cinder-scheduler" containerID="cri-o://07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9" gracePeriod=30 Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.313349 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.417996 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xhfs\" (UniqueName: \"kubernetes.io/projected/3a864762-5560-46b5-86ef-1ad6dd3adfa7-kube-api-access-2xhfs\") pod \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.418135 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-svc\") pod \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.418236 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-config\") pod \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.418305 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-sb\") pod \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.418343 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-nb\") pod \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.418368 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-swift-storage-0\") pod \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\" (UID: \"3a864762-5560-46b5-86ef-1ad6dd3adfa7\") " Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.433575 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a864762-5560-46b5-86ef-1ad6dd3adfa7-kube-api-access-2xhfs" (OuterVolumeSpecName: "kube-api-access-2xhfs") pod "3a864762-5560-46b5-86ef-1ad6dd3adfa7" (UID: "3a864762-5560-46b5-86ef-1ad6dd3adfa7"). InnerVolumeSpecName "kube-api-access-2xhfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.501618 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-config" (OuterVolumeSpecName: "config") pod "3a864762-5560-46b5-86ef-1ad6dd3adfa7" (UID: "3a864762-5560-46b5-86ef-1ad6dd3adfa7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.505222 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3a864762-5560-46b5-86ef-1ad6dd3adfa7" (UID: "3a864762-5560-46b5-86ef-1ad6dd3adfa7"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.509273 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3a864762-5560-46b5-86ef-1ad6dd3adfa7" (UID: "3a864762-5560-46b5-86ef-1ad6dd3adfa7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.526527 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xhfs\" (UniqueName: \"kubernetes.io/projected/3a864762-5560-46b5-86ef-1ad6dd3adfa7-kube-api-access-2xhfs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.526564 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.526574 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.526582 4816 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.529548 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3a864762-5560-46b5-86ef-1ad6dd3adfa7" (UID: "3a864762-5560-46b5-86ef-1ad6dd3adfa7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.550299 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3a864762-5560-46b5-86ef-1ad6dd3adfa7" (UID: "3a864762-5560-46b5-86ef-1ad6dd3adfa7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.628971 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:39 crc kubenswrapper[4816]: I0216 13:23:39.628998 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a864762-5560-46b5-86ef-1ad6dd3adfa7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.299092 4816 generic.go:334] "Generic (PLEG): container finished" podID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerID="33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb" exitCode=0 Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.299158 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0","Type":"ContainerDied","Data":"33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb"} Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.302492 4816 generic.go:334] "Generic (PLEG): container finished" podID="e948086b-213d-4435-a751-c716f71b95f2" containerID="4c6c7ae0ae2a2aaf24f32900cba79dfeed3ae6f7ea3a213eed8d31fe5f2e53fd" exitCode=0 Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.302550 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6487d4d9b-cbp92" event={"ID":"e948086b-213d-4435-a751-c716f71b95f2","Type":"ContainerDied","Data":"4c6c7ae0ae2a2aaf24f32900cba79dfeed3ae6f7ea3a213eed8d31fe5f2e53fd"} Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.302587 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-6g2pf" Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.429964 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-6g2pf"] Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.450343 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-6g2pf"] Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.865232 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-bf8ff5468-dcb5b_91213a5b-68c8-4220-81db-f6b5f3ff324e/neutron-api/0.log" Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.865566 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.972087 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qc9j\" (UniqueName: \"kubernetes.io/projected/91213a5b-68c8-4220-81db-f6b5f3ff324e-kube-api-access-6qc9j\") pod \"91213a5b-68c8-4220-81db-f6b5f3ff324e\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.972170 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-combined-ca-bundle\") pod \"91213a5b-68c8-4220-81db-f6b5f3ff324e\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.972232 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-ovndb-tls-certs\") pod \"91213a5b-68c8-4220-81db-f6b5f3ff324e\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.972262 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-config\") pod \"91213a5b-68c8-4220-81db-f6b5f3ff324e\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.972279 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-httpd-config\") pod \"91213a5b-68c8-4220-81db-f6b5f3ff324e\" (UID: \"91213a5b-68c8-4220-81db-f6b5f3ff324e\") " Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.977932 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "91213a5b-68c8-4220-81db-f6b5f3ff324e" (UID: "91213a5b-68c8-4220-81db-f6b5f3ff324e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:40 crc kubenswrapper[4816]: I0216 13:23:40.978613 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91213a5b-68c8-4220-81db-f6b5f3ff324e-kube-api-access-6qc9j" (OuterVolumeSpecName: "kube-api-access-6qc9j") pod "91213a5b-68c8-4220-81db-f6b5f3ff324e" (UID: "91213a5b-68c8-4220-81db-f6b5f3ff324e"). InnerVolumeSpecName "kube-api-access-6qc9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.046965 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "91213a5b-68c8-4220-81db-f6b5f3ff324e" (UID: "91213a5b-68c8-4220-81db-f6b5f3ff324e"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.047876 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-config" (OuterVolumeSpecName: "config") pod "91213a5b-68c8-4220-81db-f6b5f3ff324e" (UID: "91213a5b-68c8-4220-81db-f6b5f3ff324e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.056827 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91213a5b-68c8-4220-81db-f6b5f3ff324e" (UID: "91213a5b-68c8-4220-81db-f6b5f3ff324e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.074258 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qc9j\" (UniqueName: \"kubernetes.io/projected/91213a5b-68c8-4220-81db-f6b5f3ff324e-kube-api-access-6qc9j\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.074294 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.074302 4816 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.074311 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.074325 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91213a5b-68c8-4220-81db-f6b5f3ff324e-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.317466 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-bf8ff5468-dcb5b_91213a5b-68c8-4220-81db-f6b5f3ff324e/neutron-api/0.log" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.317526 4816 generic.go:334] "Generic (PLEG): container finished" podID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerID="e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a" exitCode=137 Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.317560 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8ff5468-dcb5b" event={"ID":"91213a5b-68c8-4220-81db-f6b5f3ff324e","Type":"ContainerDied","Data":"e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a"} Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.317598 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8ff5468-dcb5b" event={"ID":"91213a5b-68c8-4220-81db-f6b5f3ff324e","Type":"ContainerDied","Data":"c9f347a26158777e4e8f0befc7879356d6cab8fce442030978ce8cca8e660a69"} Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.317616 4816 scope.go:117] "RemoveContainer" containerID="847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.317611 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-bf8ff5468-dcb5b" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.344793 4816 scope.go:117] "RemoveContainer" containerID="e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.356855 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-bf8ff5468-dcb5b"] Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.369151 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-bf8ff5468-dcb5b"] Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.373384 4816 scope.go:117] "RemoveContainer" containerID="847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6" Feb 16 13:23:41 crc kubenswrapper[4816]: E0216 13:23:41.380370 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6\": container with ID starting with 847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6 not found: ID does not exist" containerID="847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.380418 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6"} err="failed to get container status \"847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6\": rpc error: code = NotFound desc = could not find container \"847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6\": container with ID starting with 847abe2048571274099afd856d360a9b98f5256ac5146d5a8e2948fd808a09d6 not found: ID does not exist" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.380446 4816 scope.go:117] "RemoveContainer" containerID="e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a" Feb 16 13:23:41 crc kubenswrapper[4816]: E0216 13:23:41.381941 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a\": container with ID starting with e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a not found: ID does not exist" containerID="e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.381973 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a"} err="failed to get container status \"e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a\": rpc error: code = NotFound desc = could not find container \"e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a\": container with ID starting with e0c50ec17de43e11e76289e20f3d43a65b56679f1d43de91924190b7e993a41a not found: ID does not exist" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.412267 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" path="/var/lib/kubelet/pods/3a864762-5560-46b5-86ef-1ad6dd3adfa7/volumes" Feb 16 13:23:41 crc kubenswrapper[4816]: I0216 13:23:41.413788 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" path="/var/lib/kubelet/pods/91213a5b-68c8-4220-81db-f6b5f3ff324e/volumes" Feb 16 13:23:41 
crc kubenswrapper[4816]: I0216 13:23:41.923267 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.090813 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-config-data\") pod \"1c830726-778d-4471-98f0-abe404146440\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.090977 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-combined-ca-bundle\") pod \"1c830726-778d-4471-98f0-abe404146440\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.091030 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-public-tls-certs\") pod \"1c830726-778d-4471-98f0-abe404146440\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.091057 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-internal-tls-certs\") pod \"1c830726-778d-4471-98f0-abe404146440\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.091108 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmhlv\" (UniqueName: \"kubernetes.io/projected/1c830726-778d-4471-98f0-abe404146440-kube-api-access-mmhlv\") pod \"1c830726-778d-4471-98f0-abe404146440\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.091207 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-scripts\") pod \"1c830726-778d-4471-98f0-abe404146440\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.091259 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c830726-778d-4471-98f0-abe404146440-logs\") pod \"1c830726-778d-4471-98f0-abe404146440\" (UID: \"1c830726-778d-4471-98f0-abe404146440\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.092269 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c830726-778d-4471-98f0-abe404146440-logs" (OuterVolumeSpecName: "logs") pod "1c830726-778d-4471-98f0-abe404146440" (UID: "1c830726-778d-4471-98f0-abe404146440"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.101222 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c830726-778d-4471-98f0-abe404146440-kube-api-access-mmhlv" (OuterVolumeSpecName: "kube-api-access-mmhlv") pod "1c830726-778d-4471-98f0-abe404146440" (UID: "1c830726-778d-4471-98f0-abe404146440"). InnerVolumeSpecName "kube-api-access-mmhlv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.103843 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-scripts" (OuterVolumeSpecName: "scripts") pod "1c830726-778d-4471-98f0-abe404146440" (UID: "1c830726-778d-4471-98f0-abe404146440"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.164816 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-config-data" (OuterVolumeSpecName: "config-data") pod "1c830726-778d-4471-98f0-abe404146440" (UID: "1c830726-778d-4471-98f0-abe404146440"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.165146 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c830726-778d-4471-98f0-abe404146440" (UID: "1c830726-778d-4471-98f0-abe404146440"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.193120 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmhlv\" (UniqueName: \"kubernetes.io/projected/1c830726-778d-4471-98f0-abe404146440-kube-api-access-mmhlv\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.193156 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.193166 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c830726-778d-4471-98f0-abe404146440-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.193174 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.193184 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.219524 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.256501 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1c830726-778d-4471-98f0-abe404146440" (UID: "1c830726-778d-4471-98f0-abe404146440"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.271302 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1c830726-778d-4471-98f0-abe404146440" (UID: "1c830726-778d-4471-98f0-abe404146440"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.295016 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.295070 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c830726-778d-4471-98f0-abe404146440-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.327464 4816 generic.go:334] "Generic (PLEG): container finished" podID="1c830726-778d-4471-98f0-abe404146440" containerID="8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8" exitCode=0 Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.327511 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55b8c8fc68-7vq25" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.327550 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55b8c8fc68-7vq25" event={"ID":"1c830726-778d-4471-98f0-abe404146440","Type":"ContainerDied","Data":"8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8"} Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.327596 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55b8c8fc68-7vq25" event={"ID":"1c830726-778d-4471-98f0-abe404146440","Type":"ContainerDied","Data":"6b5b210f6292d1ae6d3755c1d2555206763b0b6a4ca8ae236b4c3d7c56803e76"} Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.327615 4816 scope.go:117] "RemoveContainer" containerID="8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.329886 4816 generic.go:334] "Generic (PLEG): container finished" podID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerID="07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9" exitCode=0 Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.329950 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0","Type":"ContainerDied","Data":"07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9"} Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.329976 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0","Type":"ContainerDied","Data":"690077c3cdb41f1c26fcf6a07e7f213e3ac043b5dcf12b9ca6d81b1c6a5ba45f"} Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.330034 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.378815 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-55b8c8fc68-7vq25"] Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.386795 4816 scope.go:117] "RemoveContainer" containerID="5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.394218 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-55b8c8fc68-7vq25"] Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.396499 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-etc-machine-id\") pod \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.396565 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-combined-ca-bundle\") pod \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.396578 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" (UID: "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.396599 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data\") pod \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.396696 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data-custom\") pod \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.396765 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpvvb\" (UniqueName: \"kubernetes.io/projected/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-kube-api-access-wpvvb\") pod \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.396863 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-scripts\") pod \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\" (UID: \"01bbfd87-b2f6-4c91-8b62-96e46cce3ac0\") " Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.397505 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.401069 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" (UID: "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.401635 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-scripts" (OuterVolumeSpecName: "scripts") pod "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" (UID: "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.407826 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-kube-api-access-wpvvb" (OuterVolumeSpecName: "kube-api-access-wpvvb") pod "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" (UID: "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0"). InnerVolumeSpecName "kube-api-access-wpvvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.454784 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" (UID: "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.492532 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data" (OuterVolumeSpecName: "config-data") pod "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" (UID: "01bbfd87-b2f6-4c91-8b62-96e46cce3ac0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.500212 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpvvb\" (UniqueName: \"kubernetes.io/projected/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-kube-api-access-wpvvb\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.500237 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.500247 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.500255 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.500264 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.559844 4816 scope.go:117] "RemoveContainer" containerID="8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.560443 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8\": container with ID starting with 8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8 not found: ID does not exist" containerID="8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.560486 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8"} err="failed to get container status \"8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8\": rpc error: code = NotFound desc = could not find container \"8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8\": container with ID starting with 8815649fe0580d15b74e279052b4034541ce3d09632d9d9c2b4af56f5345e2b8 not found: ID does not exist" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.560517 4816 scope.go:117] "RemoveContainer" containerID="5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.561118 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1\": container with ID starting with 5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1 not found: ID does not exist" containerID="5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.561141 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1"} err="failed to get container status 
\"5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1\": rpc error: code = NotFound desc = could not find container \"5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1\": container with ID starting with 5fd3b8b8419014f96b4ed5807b2c1ee9ff8acb4fd0de6dff3f3e8d80da6cb8c1 not found: ID does not exist" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.561158 4816 scope.go:117] "RemoveContainer" containerID="33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.586533 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587143 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="cinder-scheduler" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587169 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="cinder-scheduler" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587185 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-httpd" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587192 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-httpd" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587201 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="probe" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587208 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="probe" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587221 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" containerName="init" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587228 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" containerName="init" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587259 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-api" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587265 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-api" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587275 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-api" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587281 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-api" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587294 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-log" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587299 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-log" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.587309 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" 
containerName="dnsmasq-dns" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587316 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" containerName="dnsmasq-dns" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587495 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-api" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587509 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-httpd" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587520 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="91213a5b-68c8-4220-81db-f6b5f3ff324e" containerName="neutron-api" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587531 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a864762-5560-46b5-86ef-1ad6dd3adfa7" containerName="dnsmasq-dns" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587541 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c830726-778d-4471-98f0-abe404146440" containerName="placement-log" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587551 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="probe" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.587561 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" containerName="cinder-scheduler" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.588239 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.592814 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.593701 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.594394 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-cwvb4" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.600944 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.602158 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config-secret\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.602243 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.602315 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config\") pod \"openstackclient\" (UID: 
\"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.602403 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcvrv\" (UniqueName: \"kubernetes.io/projected/cca53be3-2b0f-4523-8fc4-d992bf72a13c-kube-api-access-tcvrv\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.624155 4816 scope.go:117] "RemoveContainer" containerID="07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.644154 4816 scope.go:117] "RemoveContainer" containerID="33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.651378 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb\": container with ID starting with 33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb not found: ID does not exist" containerID="33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.651436 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb"} err="failed to get container status \"33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb\": rpc error: code = NotFound desc = could not find container \"33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb\": container with ID starting with 33c9714427833958a18e05784988aaaa192e00db738629d574dbfe147a79f4fb not found: ID does not exist" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.651474 4816 scope.go:117] "RemoveContainer" containerID="07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9" Feb 16 13:23:42 crc kubenswrapper[4816]: E0216 13:23:42.652494 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9\": container with ID starting with 07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9 not found: ID does not exist" containerID="07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.652530 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9"} err="failed to get container status \"07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9\": rpc error: code = NotFound desc = could not find container \"07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9\": container with ID starting with 07e56bdbca7da6c9ee14931a4da6b120561f7387aa67fa285c11554ad05bb0a9 not found: ID does not exist" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.696961 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.708600 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-combined-ca-bundle\") pod 
\"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.708702 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.708744 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcvrv\" (UniqueName: \"kubernetes.io/projected/cca53be3-2b0f-4523-8fc4-d992bf72a13c-kube-api-access-tcvrv\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.708808 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config-secret\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.710695 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.712606 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config-secret\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.723800 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.731103 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.738926 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.741227 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.742883 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcvrv\" (UniqueName: \"kubernetes.io/projected/cca53be3-2b0f-4523-8fc4-d992bf72a13c-kube-api-access-tcvrv\") pod \"openstackclient\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.747881 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.748203 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.813170 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.813271 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59470ba6-bdc1-455a-abeb-f0757dcba5f6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.813302 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8nw8\" (UniqueName: \"kubernetes.io/projected/59470ba6-bdc1-455a-abeb-f0757dcba5f6-kube-api-access-q8nw8\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.813604 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.813924 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.814196 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.916419 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.916513 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.916577 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.916616 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.916649 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59470ba6-bdc1-455a-abeb-f0757dcba5f6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.916698 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8nw8\" (UniqueName: \"kubernetes.io/projected/59470ba6-bdc1-455a-abeb-f0757dcba5f6-kube-api-access-q8nw8\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.917132 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59470ba6-bdc1-455a-abeb-f0757dcba5f6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.921293 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.921968 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.924577 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.930593 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.934555 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:42 crc kubenswrapper[4816]: I0216 13:23:42.939092 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8nw8\" (UniqueName: \"kubernetes.io/projected/59470ba6-bdc1-455a-abeb-f0757dcba5f6-kube-api-access-q8nw8\") pod \"cinder-scheduler-0\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") " pod="openstack/cinder-scheduler-0" Feb 16 13:23:43 crc kubenswrapper[4816]: I0216 13:23:43.112079 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 13:23:43 crc kubenswrapper[4816]: I0216 13:23:43.417257 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01bbfd87-b2f6-4c91-8b62-96e46cce3ac0" path="/var/lib/kubelet/pods/01bbfd87-b2f6-4c91-8b62-96e46cce3ac0/volumes" Feb 16 13:23:43 crc kubenswrapper[4816]: I0216 13:23:43.418850 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c830726-778d-4471-98f0-abe404146440" path="/var/lib/kubelet/pods/1c830726-778d-4471-98f0-abe404146440/volumes" Feb 16 13:23:43 crc kubenswrapper[4816]: I0216 13:23:43.489986 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 16 13:23:43 crc kubenswrapper[4816]: W0216 13:23:43.491784 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcca53be3_2b0f_4523_8fc4_d992bf72a13c.slice/crio-ee1a6a2fb82dcf82fb87142fe06d6d4e62dd58aed5c78f877f8c25db62e94feb WatchSource:0}: Error finding container ee1a6a2fb82dcf82fb87142fe06d6d4e62dd58aed5c78f877f8c25db62e94feb: Status 404 returned error can't find the container with id ee1a6a2fb82dcf82fb87142fe06d6d4e62dd58aed5c78f877f8c25db62e94feb Feb 16 13:23:43 crc kubenswrapper[4816]: I0216 13:23:43.643878 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.370291 4816 generic.go:334] "Generic (PLEG): container finished" podID="e948086b-213d-4435-a751-c716f71b95f2" containerID="403d7d0e05eebee102b2d2a528727671b1ddf72a2a83a41898051db48d730e96" exitCode=0 Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.370569 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6487d4d9b-cbp92" event={"ID":"e948086b-213d-4435-a751-c716f71b95f2","Type":"ContainerDied","Data":"403d7d0e05eebee102b2d2a528727671b1ddf72a2a83a41898051db48d730e96"} Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.375106 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"59470ba6-bdc1-455a-abeb-f0757dcba5f6","Type":"ContainerStarted","Data":"18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807"} Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.375173 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"59470ba6-bdc1-455a-abeb-f0757dcba5f6","Type":"ContainerStarted","Data":"862d6bf0589f74958a908ba0bad11a5aa60dbe4a8a939cf373a7917129d0e4a6"} Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.400522 
4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"cca53be3-2b0f-4523-8fc4-d992bf72a13c","Type":"ContainerStarted","Data":"ee1a6a2fb82dcf82fb87142fe06d6d4e62dd58aed5c78f877f8c25db62e94feb"} Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.617346 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.759592 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-combined-ca-bundle\") pod \"e948086b-213d-4435-a751-c716f71b95f2\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.759874 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-config\") pod \"e948086b-213d-4435-a751-c716f71b95f2\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.759977 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-httpd-config\") pod \"e948086b-213d-4435-a751-c716f71b95f2\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.760019 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-ovndb-tls-certs\") pod \"e948086b-213d-4435-a751-c716f71b95f2\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.760181 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jb758\" (UniqueName: \"kubernetes.io/projected/e948086b-213d-4435-a751-c716f71b95f2-kube-api-access-jb758\") pod \"e948086b-213d-4435-a751-c716f71b95f2\" (UID: \"e948086b-213d-4435-a751-c716f71b95f2\") " Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.766561 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e948086b-213d-4435-a751-c716f71b95f2-kube-api-access-jb758" (OuterVolumeSpecName: "kube-api-access-jb758") pod "e948086b-213d-4435-a751-c716f71b95f2" (UID: "e948086b-213d-4435-a751-c716f71b95f2"). InnerVolumeSpecName "kube-api-access-jb758". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.766836 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "e948086b-213d-4435-a751-c716f71b95f2" (UID: "e948086b-213d-4435-a751-c716f71b95f2"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.824543 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-config" (OuterVolumeSpecName: "config") pod "e948086b-213d-4435-a751-c716f71b95f2" (UID: "e948086b-213d-4435-a751-c716f71b95f2"). InnerVolumeSpecName "config". 
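The records above show the kubelet volume manager walking each of cinder-scheduler-0's volumes through its attach/mount state machine (VerifyControllerAttachedVolume, then "MountVolume started", then "MountVolume.SetUp succeeded"), while the neutron pod's volumes run the reverse path ("UnmountVolume started", "UnmountVolume.TearDown succeeded", "Volume detached"). A minimal Go sketch of that desired-state/actual-state reconcile pattern follows; the types and function names are illustrative stand-ins, not kubelet's actual API.

package main

import "fmt"

// state is a hypothetical world-state view: volume name -> mounted.
// The kubelet keeps far richer desired/actual state caches than this.
type state map[string]bool

// reconcile mounts volumes that are desired but absent and unmounts
// volumes that are present but no longer desired: the same two passes
// that produce the paired "MountVolume started" / "UnmountVolume
// started" records in the log above.
func reconcile(desired, actual state) {
	for vol := range desired {
		if !actual[vol] {
			fmt.Printf("MountVolume started for volume %q\n", vol)
			actual[vol] = true // stands in for MountVolume.SetUp
			fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", vol)
		}
	}
	for vol := range actual {
		if !desired[vol] {
			fmt.Printf("UnmountVolume started for volume %q\n", vol)
			delete(actual, vol) // stands in for UnmountVolume.TearDown
			fmt.Printf("Volume detached for volume %q\n", vol)
		}
	}
}

func main() {
	reconcile(
		state{"config-data": true, "scripts": true}, // pod spec wants these
		state{"httpd-config": true},                 // leftover from a deleted pod
	)
}

Each pass is idempotent, which is why the reconciler can simply rerun on every sync tick and converge to the pod spec.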
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.830619 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e948086b-213d-4435-a751-c716f71b95f2" (UID: "e948086b-213d-4435-a751-c716f71b95f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.862963 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.863000 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jb758\" (UniqueName: \"kubernetes.io/projected/e948086b-213d-4435-a751-c716f71b95f2-kube-api-access-jb758\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.863011 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.863020 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.871535 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "e948086b-213d-4435-a751-c716f71b95f2" (UID: "e948086b-213d-4435-a751-c716f71b95f2"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:44 crc kubenswrapper[4816]: I0216 13:23:44.964726 4816 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e948086b-213d-4435-a751-c716f71b95f2-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.031818 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-89f5bdcc-rdr9p"] Feb 16 13:23:45 crc kubenswrapper[4816]: E0216 13:23:45.032295 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-api" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.032344 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-api" Feb 16 13:23:45 crc kubenswrapper[4816]: E0216 13:23:45.032372 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-httpd" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.032383 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-httpd" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.032620 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-api" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.032652 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e948086b-213d-4435-a751-c716f71b95f2" containerName="neutron-httpd" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.039573 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.042145 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.042353 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.044491 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.050711 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-89f5bdcc-rdr9p"] Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.094316 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169035 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-combined-ca-bundle\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169087 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-run-httpd\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169104 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-log-httpd\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169139 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-config-data\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169158 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-public-tls-certs\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169181 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-etc-swift\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169212 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbhtf\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-kube-api-access-dbhtf\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.169239 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-internal-tls-certs\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.270787 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-combined-ca-bundle\") pod \"a9c47f01-6045-4af2-82f5-3939a41029e6\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.270863 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qj4t\" (UniqueName: \"kubernetes.io/projected/a9c47f01-6045-4af2-82f5-3939a41029e6-kube-api-access-5qj4t\") pod \"a9c47f01-6045-4af2-82f5-3939a41029e6\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.270979 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data\") pod \"a9c47f01-6045-4af2-82f5-3939a41029e6\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.271025 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data-custom\") pod \"a9c47f01-6045-4af2-82f5-3939a41029e6\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.271896 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c47f01-6045-4af2-82f5-3939a41029e6-logs\") pod \"a9c47f01-6045-4af2-82f5-3939a41029e6\" (UID: \"a9c47f01-6045-4af2-82f5-3939a41029e6\") " Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272187 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-combined-ca-bundle\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272233 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-run-httpd\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272250 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-log-httpd\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272295 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-config-data\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272316 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-public-tls-certs\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272341 4816 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9c47f01-6045-4af2-82f5-3939a41029e6-logs" (OuterVolumeSpecName: "logs") pod "a9c47f01-6045-4af2-82f5-3939a41029e6" (UID: "a9c47f01-6045-4af2-82f5-3939a41029e6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272353 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-etc-swift\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272434 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbhtf\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-kube-api-access-dbhtf\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272477 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-internal-tls-certs\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.272541 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c47f01-6045-4af2-82f5-3939a41029e6-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.273016 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-log-httpd\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.273303 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-run-httpd\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.288054 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-public-tls-certs\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.288183 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-internal-tls-certs\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.288418 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9c47f01-6045-4af2-82f5-3939a41029e6-kube-api-access-5qj4t" (OuterVolumeSpecName: "kube-api-access-5qj4t") pod 
"a9c47f01-6045-4af2-82f5-3939a41029e6" (UID: "a9c47f01-6045-4af2-82f5-3939a41029e6"). InnerVolumeSpecName "kube-api-access-5qj4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.288976 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-combined-ca-bundle\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.289360 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a9c47f01-6045-4af2-82f5-3939a41029e6" (UID: "a9c47f01-6045-4af2-82f5-3939a41029e6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.290520 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-config-data\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.291072 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-etc-swift\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.294386 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbhtf\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-kube-api-access-dbhtf\") pod \"swift-proxy-89f5bdcc-rdr9p\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.319559 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9c47f01-6045-4af2-82f5-3939a41029e6" (UID: "a9c47f01-6045-4af2-82f5-3939a41029e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.352436 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data" (OuterVolumeSpecName: "config-data") pod "a9c47f01-6045-4af2-82f5-3939a41029e6" (UID: "a9c47f01-6045-4af2-82f5-3939a41029e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.374762 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.374798 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.374808 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c47f01-6045-4af2-82f5-3939a41029e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.374817 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qj4t\" (UniqueName: \"kubernetes.io/projected/a9c47f01-6045-4af2-82f5-3939a41029e6-kube-api-access-5qj4t\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.379604 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.447205 4816 generic.go:334] "Generic (PLEG): container finished" podID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerID="a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc" exitCode=137 Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.447493 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.448688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" event={"ID":"a9c47f01-6045-4af2-82f5-3939a41029e6","Type":"ContainerDied","Data":"a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc"} Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.448743 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67998cc688-ffntn" event={"ID":"a9c47f01-6045-4af2-82f5-3939a41029e6","Type":"ContainerDied","Data":"9495b88045b8a4545a5be37bbf03fdc38c00bb2b8f30e393d07040a828c23309"} Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.448762 4816 scope.go:117] "RemoveContainer" containerID="a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.458492 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6487d4d9b-cbp92" event={"ID":"e948086b-213d-4435-a751-c716f71b95f2","Type":"ContainerDied","Data":"920a3d840cff98e5e8379e0175789b4cd4acd9597c16a33e6646ecc9e729abba"} Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.458629 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6487d4d9b-cbp92" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.475088 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"59470ba6-bdc1-455a-abeb-f0757dcba5f6","Type":"ContainerStarted","Data":"934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e"} Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.510223 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.510203147 podStartE2EDuration="3.510203147s" podCreationTimestamp="2026-02-16 13:23:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:45.495235749 +0000 UTC m=+1224.821949477" watchObservedRunningTime="2026-02-16 13:23:45.510203147 +0000 UTC m=+1224.836916875" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.551960 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-67998cc688-ffntn"] Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.575365 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-67998cc688-ffntn"] Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.595191 4816 scope.go:117] "RemoveContainer" containerID="a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.623969 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6487d4d9b-cbp92"] Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.642240 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6487d4d9b-cbp92"] Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.679011 4816 scope.go:117] "RemoveContainer" containerID="a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc" Feb 16 13:23:45 crc kubenswrapper[4816]: E0216 13:23:45.684701 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc\": container with ID starting with a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc not found: ID does not exist" containerID="a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.684762 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc"} err="failed to get container status \"a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc\": rpc error: code = NotFound desc = could not find container \"a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc\": container with ID starting with a263de6333f1601186aea3a076f7e66e47d59247a80087ac3dd181a7931512bc not found: ID does not exist" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.684804 4816 scope.go:117] "RemoveContainer" containerID="a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098" Feb 16 13:23:45 crc kubenswrapper[4816]: E0216 13:23:45.692101 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098\": container with ID starting with 
a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098 not found: ID does not exist" containerID="a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.692192 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098"} err="failed to get container status \"a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098\": rpc error: code = NotFound desc = could not find container \"a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098\": container with ID starting with a90d68112eb6515b95f3327bc3605c63f9dab3ce67f34f2e963dd9041599a098 not found: ID does not exist" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.692241 4816 scope.go:117] "RemoveContainer" containerID="4c6c7ae0ae2a2aaf24f32900cba79dfeed3ae6f7ea3a213eed8d31fe5f2e53fd" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.737847 4816 scope.go:117] "RemoveContainer" containerID="403d7d0e05eebee102b2d2a528727671b1ddf72a2a83a41898051db48d730e96" Feb 16 13:23:45 crc kubenswrapper[4816]: I0216 13:23:45.909038 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.108095 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-89f5bdcc-rdr9p"] Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.480924 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.481582 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-central-agent" containerID="cri-o://4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325" gracePeriod=30 Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.481948 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="sg-core" containerID="cri-o://625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db" gracePeriod=30 Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.481987 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-notification-agent" containerID="cri-o://17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6" gracePeriod=30 Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.482021 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="proxy-httpd" containerID="cri-o://d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac" gracePeriod=30 Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.504262 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-89f5bdcc-rdr9p" event={"ID":"fd68bcb4-cb94-422a-b44a-7fd47d309f0a","Type":"ContainerStarted","Data":"6340a77ac99776aeb020b01976822fd26699f38ca39fa326120c029fa48e7a90"} Feb 16 13:23:46 crc kubenswrapper[4816]: I0216 13:23:46.512006 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="proxy-httpd" 
probeResult="failure" output="HTTP probe failed with statuscode: 502" Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.410014 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" path="/var/lib/kubelet/pods/a9c47f01-6045-4af2-82f5-3939a41029e6/volumes" Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.411613 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e948086b-213d-4435-a751-c716f71b95f2" path="/var/lib/kubelet/pods/e948086b-213d-4435-a751-c716f71b95f2/volumes" Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.523562 4816 generic.go:334] "Generic (PLEG): container finished" podID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerID="d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac" exitCode=0 Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.523621 4816 generic.go:334] "Generic (PLEG): container finished" podID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerID="625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db" exitCode=2 Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.523629 4816 generic.go:334] "Generic (PLEG): container finished" podID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerID="4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325" exitCode=0 Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.523684 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerDied","Data":"d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac"} Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.523710 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerDied","Data":"625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db"} Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.523720 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerDied","Data":"4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325"} Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.527179 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-89f5bdcc-rdr9p" event={"ID":"fd68bcb4-cb94-422a-b44a-7fd47d309f0a","Type":"ContainerStarted","Data":"b1b0de881416dfc3b1efa9b3cceea64ff96d9d1db02f7c7b5b1c9240e3757b44"} Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.527232 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-89f5bdcc-rdr9p" event={"ID":"fd68bcb4-cb94-422a-b44a-7fd47d309f0a","Type":"ContainerStarted","Data":"a8e282eef394bdb8b3559f783af4f640c6a7bb4f9d7e1a7ac0a8e8e3c4b0bafb"} Feb 16 13:23:47 crc kubenswrapper[4816]: I0216 13:23:47.527355 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.113089 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.469736 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.494552 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-89f5bdcc-rdr9p" podStartSLOduration=4.494533831 podStartE2EDuration="4.494533831s" podCreationTimestamp="2026-02-16 13:23:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:47.55237876 +0000 UTC m=+1226.879092508" watchObservedRunningTime="2026-02-16 13:23:48.494533831 +0000 UTC m=+1227.821247559" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.542949 4816 generic.go:334] "Generic (PLEG): container finished" podID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerID="17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6" exitCode=0 Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.543048 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.543060 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerDied","Data":"17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6"} Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.543232 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad6c02a5-54f8-43d0-9526-5933be302a31","Type":"ContainerDied","Data":"b3f1e83fa46d92886eb8e11af3dc4eaf2e6850c26d4cad038bcaf5c7ead981cf"} Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.543287 4816 scope.go:117] "RemoveContainer" containerID="d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.544052 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.548609 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-config-data\") pod \"ad6c02a5-54f8-43d0-9526-5933be302a31\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.548688 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-log-httpd\") pod \"ad6c02a5-54f8-43d0-9526-5933be302a31\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.548731 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-872ks\" (UniqueName: \"kubernetes.io/projected/ad6c02a5-54f8-43d0-9526-5933be302a31-kube-api-access-872ks\") pod \"ad6c02a5-54f8-43d0-9526-5933be302a31\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.548796 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-run-httpd\") pod \"ad6c02a5-54f8-43d0-9526-5933be302a31\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.548840 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-scripts\") pod \"ad6c02a5-54f8-43d0-9526-5933be302a31\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.548955 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-sg-core-conf-yaml\") pod \"ad6c02a5-54f8-43d0-9526-5933be302a31\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.549005 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-combined-ca-bundle\") pod \"ad6c02a5-54f8-43d0-9526-5933be302a31\" (UID: \"ad6c02a5-54f8-43d0-9526-5933be302a31\") " Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.550916 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ad6c02a5-54f8-43d0-9526-5933be302a31" (UID: "ad6c02a5-54f8-43d0-9526-5933be302a31"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.551447 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ad6c02a5-54f8-43d0-9526-5933be302a31" (UID: "ad6c02a5-54f8-43d0-9526-5933be302a31"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.555859 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad6c02a5-54f8-43d0-9526-5933be302a31-kube-api-access-872ks" (OuterVolumeSpecName: "kube-api-access-872ks") pod "ad6c02a5-54f8-43d0-9526-5933be302a31" (UID: "ad6c02a5-54f8-43d0-9526-5933be302a31"). InnerVolumeSpecName "kube-api-access-872ks". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.570971 4816 scope.go:117] "RemoveContainer" containerID="625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.573588 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-scripts" (OuterVolumeSpecName: "scripts") pod "ad6c02a5-54f8-43d0-9526-5933be302a31" (UID: "ad6c02a5-54f8-43d0-9526-5933be302a31"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.592068 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ad6c02a5-54f8-43d0-9526-5933be302a31" (UID: "ad6c02a5-54f8-43d0-9526-5933be302a31"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.633836 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad6c02a5-54f8-43d0-9526-5933be302a31" (UID: "ad6c02a5-54f8-43d0-9526-5933be302a31"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.651447 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.651486 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.651499 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.651510 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-872ks\" (UniqueName: \"kubernetes.io/projected/ad6c02a5-54f8-43d0-9526-5933be302a31-kube-api-access-872ks\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.651522 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad6c02a5-54f8-43d0-9526-5933be302a31-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.651532 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.662448 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-config-data" (OuterVolumeSpecName: "config-data") pod "ad6c02a5-54f8-43d0-9526-5933be302a31" (UID: "ad6c02a5-54f8-43d0-9526-5933be302a31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.723851 4816 scope.go:117] "RemoveContainer" containerID="17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.749126 4816 scope.go:117] "RemoveContainer" containerID="4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.752745 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad6c02a5-54f8-43d0-9526-5933be302a31-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.769162 4816 scope.go:117] "RemoveContainer" containerID="d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.770049 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac\": container with ID starting with d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac not found: ID does not exist" containerID="d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.770109 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac"} err="failed to get container status \"d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac\": rpc error: code = NotFound desc = could not find container \"d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac\": container with ID starting with d28e513e9b6a6d5aa2a5d4cd3f143ad0abc5df14e714edb4389395e2cec369ac not found: ID does not exist" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.770151 4816 scope.go:117] "RemoveContainer" containerID="625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.770528 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db\": container with ID starting with 625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db not found: ID does not exist" containerID="625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.770559 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db"} err="failed to get container status \"625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db\": rpc error: code = NotFound desc = could not find container \"625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db\": container with ID starting with 625a23340aee1a2a14671e40ab44c29144367dda635c7095fdc7363fa2a3d1db not found: ID does not exist" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.770582 4816 scope.go:117] "RemoveContainer" containerID="17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.770985 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6\": container with ID starting with 17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6 not found: ID does not exist" containerID="17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.771013 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6"} err="failed to get container status \"17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6\": rpc error: code = NotFound desc = could not find container \"17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6\": container with ID starting with 17b8733a2828bd60a6ef46145f3ef03d6b33d04a81fee1b5d22eff5b7fd4e8f6 not found: ID does not exist" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.771030 4816 scope.go:117] "RemoveContainer" containerID="4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.771252 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325\": container with ID starting with 4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325 not found: ID does not exist" containerID="4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.771274 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325"} err="failed to get container status \"4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325\": rpc error: code = NotFound desc = could not find container \"4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325\": container with ID starting with 4fd3bfedacde515c797488608a050d29f53d7e4cbfdea86ef3783053f5462325 not found: ID does not exist" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.879576 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.895621 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.911520 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.912846 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.912866 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.912893 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-central-agent" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.912900 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-central-agent" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.912914 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" 
containerName="sg-core" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.912919 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="sg-core" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.912928 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-notification-agent" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.912933 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-notification-agent" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.912941 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener-log" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.912946 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener-log" Feb 16 13:23:48 crc kubenswrapper[4816]: E0216 13:23:48.912957 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="proxy-httpd" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.912964 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="proxy-httpd" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.913146 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener-log" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.913163 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="sg-core" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.913179 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-central-agent" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.913193 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c47f01-6045-4af2-82f5-3939a41029e6" containerName="barbican-keystone-listener" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.913200 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="proxy-httpd" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.913207 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" containerName="ceilometer-notification-agent" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.930330 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.930461 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.933770 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:23:48 crc kubenswrapper[4816]: I0216 13:23:48.933991 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.059973 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.060031 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-log-httpd\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.060052 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-config-data\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.060091 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-run-httpd\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.060111 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-scripts\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.060146 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk268\" (UniqueName: \"kubernetes.io/projected/f39547de-3b98-4e11-9ef8-2e3744b82e23-kube-api-access-dk268\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.060178 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.161412 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.161471 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-log-httpd\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.161491 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-config-data\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.161537 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-run-httpd\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.161555 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-scripts\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.161589 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk268\" (UniqueName: \"kubernetes.io/projected/f39547de-3b98-4e11-9ef8-2e3744b82e23-kube-api-access-dk268\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.161624 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.162569 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-run-httpd\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.162937 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-log-httpd\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.170192 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.171120 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-scripts\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.174789 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-config-data\") pod 
\"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.178504 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.190724 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk268\" (UniqueName: \"kubernetes.io/projected/f39547de-3b98-4e11-9ef8-2e3744b82e23-kube-api-access-dk268\") pod \"ceilometer-0\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.262213 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:23:49 crc kubenswrapper[4816]: I0216 13:23:49.412369 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad6c02a5-54f8-43d0-9526-5933be302a31" path="/var/lib/kubelet/pods/ad6c02a5-54f8-43d0-9526-5933be302a31/volumes" Feb 16 13:23:52 crc kubenswrapper[4816]: I0216 13:23:52.793969 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:23:53 crc kubenswrapper[4816]: I0216 13:23:53.317383 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 16 13:23:55 crc kubenswrapper[4816]: I0216 13:23:55.247006 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:23:55 crc kubenswrapper[4816]: I0216 13:23:55.389436 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:55 crc kubenswrapper[4816]: I0216 13:23:55.390094 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:23:55 crc kubenswrapper[4816]: I0216 13:23:55.630806 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"cca53be3-2b0f-4523-8fc4-d992bf72a13c","Type":"ContainerStarted","Data":"cad7db43bacb78d56110ccb724697599cb15ce0faf3ee895f3eca64fe020fab6"} Feb 16 13:23:55 crc kubenswrapper[4816]: I0216 13:23:55.632307 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerStarted","Data":"840ee6dd2cbe2bf959925ff45085f1364c463b72cdfa7785356d4ecd9c5a38d2"} Feb 16 13:23:55 crc kubenswrapper[4816]: I0216 13:23:55.654485 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.154355518 podStartE2EDuration="13.654466153s" podCreationTimestamp="2026-02-16 13:23:42 +0000 UTC" firstStartedPulling="2026-02-16 13:23:43.495783182 +0000 UTC m=+1222.822496910" lastFinishedPulling="2026-02-16 13:23:54.995893807 +0000 UTC m=+1234.322607545" observedRunningTime="2026-02-16 13:23:55.646263969 +0000 UTC m=+1234.972977717" watchObservedRunningTime="2026-02-16 13:23:55.654466153 +0000 UTC m=+1234.981179881" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.537105 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-twj49"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.538756 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.547607 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-twj49"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.626970 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bqhp\" (UniqueName: \"kubernetes.io/projected/778f3898-c77a-4905-9c88-4f0222c75817-kube-api-access-2bqhp\") pod \"nova-api-db-create-twj49\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.627080 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3898-c77a-4905-9c88-4f0222c75817-operator-scripts\") pod \"nova-api-db-create-twj49\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.647030 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1245-account-create-update-tbtlc"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.648270 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.657814 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.665878 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-z4v4v"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.678809 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1245-account-create-update-tbtlc"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.678921 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.687737 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-z4v4v"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.729105 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn6sv\" (UniqueName: \"kubernetes.io/projected/f64f8bcc-d25f-4799-b916-7604027ba614-kube-api-access-qn6sv\") pod \"nova-api-1245-account-create-update-tbtlc\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.729246 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bqhp\" (UniqueName: \"kubernetes.io/projected/778f3898-c77a-4905-9c88-4f0222c75817-kube-api-access-2bqhp\") pod \"nova-api-db-create-twj49\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.729367 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3898-c77a-4905-9c88-4f0222c75817-operator-scripts\") pod \"nova-api-db-create-twj49\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.729403 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f64f8bcc-d25f-4799-b916-7604027ba614-operator-scripts\") pod \"nova-api-1245-account-create-update-tbtlc\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.795348 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3898-c77a-4905-9c88-4f0222c75817-operator-scripts\") pod \"nova-api-db-create-twj49\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.831502 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn6sv\" (UniqueName: \"kubernetes.io/projected/f64f8bcc-d25f-4799-b916-7604027ba614-kube-api-access-qn6sv\") pod \"nova-api-1245-account-create-update-tbtlc\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.831827 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/119a94e7-cb72-4388-b3f0-78d2de19889f-operator-scripts\") pod \"nova-cell0-db-create-z4v4v\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.831860 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz4sx\" (UniqueName: \"kubernetes.io/projected/119a94e7-cb72-4388-b3f0-78d2de19889f-kube-api-access-nz4sx\") pod \"nova-cell0-db-create-z4v4v\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:56 crc 
kubenswrapper[4816]: I0216 13:23:56.832009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f64f8bcc-d25f-4799-b916-7604027ba614-operator-scripts\") pod \"nova-api-1245-account-create-update-tbtlc\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.832950 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f64f8bcc-d25f-4799-b916-7604027ba614-operator-scripts\") pod \"nova-api-1245-account-create-update-tbtlc\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.838706 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-z8cp5"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.839786 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.863966 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-z8cp5"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.864582 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bqhp\" (UniqueName: \"kubernetes.io/projected/778f3898-c77a-4905-9c88-4f0222c75817-kube-api-access-2bqhp\") pod \"nova-api-db-create-twj49\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.866123 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn6sv\" (UniqueName: \"kubernetes.io/projected/f64f8bcc-d25f-4799-b916-7604027ba614-kube-api-access-qn6sv\") pod \"nova-api-1245-account-create-update-tbtlc\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.900558 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-5ljmf"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.902289 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.904296 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.913910 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-5ljmf"] Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.933228 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/119a94e7-cb72-4388-b3f0-78d2de19889f-operator-scripts\") pod \"nova-cell0-db-create-z4v4v\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.933285 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz4sx\" (UniqueName: \"kubernetes.io/projected/119a94e7-cb72-4388-b3f0-78d2de19889f-kube-api-access-nz4sx\") pod \"nova-cell0-db-create-z4v4v\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.933381 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c13d2f68-a630-4bfc-a909-0c83418e55bc-operator-scripts\") pod \"nova-cell1-db-create-z8cp5\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.933496 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8tbh\" (UniqueName: \"kubernetes.io/projected/c13d2f68-a630-4bfc-a909-0c83418e55bc-kube-api-access-h8tbh\") pod \"nova-cell1-db-create-z8cp5\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.934693 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/119a94e7-cb72-4388-b3f0-78d2de19889f-operator-scripts\") pod \"nova-cell0-db-create-z4v4v\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:56 crc kubenswrapper[4816]: I0216 13:23:56.952520 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz4sx\" (UniqueName: \"kubernetes.io/projected/119a94e7-cb72-4388-b3f0-78d2de19889f-kube-api-access-nz4sx\") pod \"nova-cell0-db-create-z4v4v\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.035638 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-operator-scripts\") pod \"nova-cell0-d27b-account-create-update-5ljmf\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.035761 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8tbh\" (UniqueName: \"kubernetes.io/projected/c13d2f68-a630-4bfc-a909-0c83418e55bc-kube-api-access-h8tbh\") pod 
\"nova-cell1-db-create-z8cp5\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.035894 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcqfd\" (UniqueName: \"kubernetes.io/projected/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-kube-api-access-tcqfd\") pod \"nova-cell0-d27b-account-create-update-5ljmf\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.035926 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c13d2f68-a630-4bfc-a909-0c83418e55bc-operator-scripts\") pod \"nova-cell1-db-create-z8cp5\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.036838 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c13d2f68-a630-4bfc-a909-0c83418e55bc-operator-scripts\") pod \"nova-cell1-db-create-z8cp5\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.066310 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-blhnm"] Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.067732 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.067752 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8tbh\" (UniqueName: \"kubernetes.io/projected/c13d2f68-a630-4bfc-a909-0c83418e55bc-kube-api-access-h8tbh\") pod \"nova-cell1-db-create-z8cp5\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.069939 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.096907 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.097689 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-blhnm"] Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.111352 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.137268 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns5nj\" (UniqueName: \"kubernetes.io/projected/049f93f8-ed00-4331-b932-1e4f98ffe8c2-kube-api-access-ns5nj\") pod \"nova-cell1-c4fc-account-create-update-blhnm\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.137344 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/049f93f8-ed00-4331-b932-1e4f98ffe8c2-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-blhnm\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.137385 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcqfd\" (UniqueName: \"kubernetes.io/projected/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-kube-api-access-tcqfd\") pod \"nova-cell0-d27b-account-create-update-5ljmf\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.137464 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-operator-scripts\") pod \"nova-cell0-d27b-account-create-update-5ljmf\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.138523 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-operator-scripts\") pod \"nova-cell0-d27b-account-create-update-5ljmf\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.158364 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-twj49" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.161873 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcqfd\" (UniqueName: \"kubernetes.io/projected/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-kube-api-access-tcqfd\") pod \"nova-cell0-d27b-account-create-update-5ljmf\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.308021 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.308197 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.309390 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns5nj\" (UniqueName: \"kubernetes.io/projected/049f93f8-ed00-4331-b932-1e4f98ffe8c2-kube-api-access-ns5nj\") pod \"nova-cell1-c4fc-account-create-update-blhnm\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.309502 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/049f93f8-ed00-4331-b932-1e4f98ffe8c2-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-blhnm\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.310437 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/049f93f8-ed00-4331-b932-1e4f98ffe8c2-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-blhnm\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.328206 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns5nj\" (UniqueName: \"kubernetes.io/projected/049f93f8-ed00-4331-b932-1e4f98ffe8c2-kube-api-access-ns5nj\") pod \"nova-cell1-c4fc-account-create-update-blhnm\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.443941 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.691741 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerStarted","Data":"661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01"} Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.747450 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1245-account-create-update-tbtlc"] Feb 16 13:23:57 crc kubenswrapper[4816]: I0216 13:23:57.974137 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-twj49"] Feb 16 13:23:57 crc kubenswrapper[4816]: W0216 13:23:57.984600 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod778f3898_c77a_4905_9c88_4f0222c75817.slice/crio-0b1e863f043d475527603c8bc56b719e9424199254b2b0fb024191228d0b4aff WatchSource:0}: Error finding container 0b1e863f043d475527603c8bc56b719e9424199254b2b0fb024191228d0b4aff: Status 404 returned error can't find the container with id 0b1e863f043d475527603c8bc56b719e9424199254b2b0fb024191228d0b4aff Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.082461 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-z4v4v"] Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.248186 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-z8cp5"] Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.257380 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-5ljmf"] Feb 16 13:23:58 crc kubenswrapper[4816]: W0216 13:23:58.257876 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc13d2f68_a630_4bfc_a909_0c83418e55bc.slice/crio-d1705c2ab1ffce5f9a17cd8021955051e6e0eb3f87e607a9d1fcc8369516b25a WatchSource:0}: Error finding container d1705c2ab1ffce5f9a17cd8021955051e6e0eb3f87e607a9d1fcc8369516b25a: Status 404 returned error can't find the container with id d1705c2ab1ffce5f9a17cd8021955051e6e0eb3f87e607a9d1fcc8369516b25a Feb 16 13:23:58 crc kubenswrapper[4816]: W0216 13:23:58.260458 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaeb7d949_8e61_4bfa_8b02_0213a7861b9f.slice/crio-5bc5f598811cafeb6fe28af7d0a20623e601986f53c326dd4323507552096b94 WatchSource:0}: Error finding container 5bc5f598811cafeb6fe28af7d0a20623e601986f53c326dd4323507552096b94: Status 404 returned error can't find the container with id 5bc5f598811cafeb6fe28af7d0a20623e601986f53c326dd4323507552096b94 Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.303518 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-blhnm"] Feb 16 13:23:58 crc kubenswrapper[4816]: W0216 13:23:58.323795 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod049f93f8_ed00_4331_b932_1e4f98ffe8c2.slice/crio-29c2c7de7081a84214c9b9ea1d7694d4ac648f44c9e49fffa8c75ce0b678b71a WatchSource:0}: Error finding container 29c2c7de7081a84214c9b9ea1d7694d4ac648f44c9e49fffa8c75ce0b678b71a: Status 404 returned error can't find the container with id 
29c2c7de7081a84214c9b9ea1d7694d4ac648f44c9e49fffa8c75ce0b678b71a Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.720109 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerStarted","Data":"60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.730362 4816 generic.go:334] "Generic (PLEG): container finished" podID="119a94e7-cb72-4388-b3f0-78d2de19889f" containerID="027e08dafffb74262275075dc8c9d08656e18039ee81855c621d571ba9668edb" exitCode=0 Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.730823 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z4v4v" event={"ID":"119a94e7-cb72-4388-b3f0-78d2de19889f","Type":"ContainerDied","Data":"027e08dafffb74262275075dc8c9d08656e18039ee81855c621d571ba9668edb"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.730869 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z4v4v" event={"ID":"119a94e7-cb72-4388-b3f0-78d2de19889f","Type":"ContainerStarted","Data":"70df48aa4f167e9cdbc4ae67f30240ca0813136358d27ecc5a6399cf59db7088"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.738994 4816 generic.go:334] "Generic (PLEG): container finished" podID="f64f8bcc-d25f-4799-b916-7604027ba614" containerID="8a476b5ff55d4104e20d7f1bdc31c6ef74cfa237527ee4844d863aa85dd3bdb1" exitCode=0 Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.739130 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1245-account-create-update-tbtlc" event={"ID":"f64f8bcc-d25f-4799-b916-7604027ba614","Type":"ContainerDied","Data":"8a476b5ff55d4104e20d7f1bdc31c6ef74cfa237527ee4844d863aa85dd3bdb1"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.739169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1245-account-create-update-tbtlc" event={"ID":"f64f8bcc-d25f-4799-b916-7604027ba614","Type":"ContainerStarted","Data":"837126632a601b8ee29c1da7d607ddefb258cdc0a8c74b23f71e5a342274b60b"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.741089 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-z8cp5" event={"ID":"c13d2f68-a630-4bfc-a909-0c83418e55bc","Type":"ContainerStarted","Data":"96a26d4a869ea29f2d4c6578c22ea996c7dcf18bdf02530d972d1e05dfd28823"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.741126 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-z8cp5" event={"ID":"c13d2f68-a630-4bfc-a909-0c83418e55bc","Type":"ContainerStarted","Data":"d1705c2ab1ffce5f9a17cd8021955051e6e0eb3f87e607a9d1fcc8369516b25a"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.742330 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" event={"ID":"049f93f8-ed00-4331-b932-1e4f98ffe8c2","Type":"ContainerStarted","Data":"24d21dd10427dd9ed85ee969b127ac599b8e7432a15c1f720ab6ea7f8f1c0d2b"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.742363 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" event={"ID":"049f93f8-ed00-4331-b932-1e4f98ffe8c2","Type":"ContainerStarted","Data":"29c2c7de7081a84214c9b9ea1d7694d4ac648f44c9e49fffa8c75ce0b678b71a"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.750738 4816 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" event={"ID":"aeb7d949-8e61-4bfa-8b02-0213a7861b9f","Type":"ContainerStarted","Data":"474929eac836803381bab25d53cae980ee7dbac3ad8d6c4038f390f093bca57e"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.750791 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" event={"ID":"aeb7d949-8e61-4bfa-8b02-0213a7861b9f","Type":"ContainerStarted","Data":"5bc5f598811cafeb6fe28af7d0a20623e601986f53c326dd4323507552096b94"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.756038 4816 generic.go:334] "Generic (PLEG): container finished" podID="778f3898-c77a-4905-9c88-4f0222c75817" containerID="d4e585d8c04d34ad4f94e9d269ca13f9a08aa393622c9cc919a2c7459810db9a" exitCode=0 Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.756101 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-twj49" event={"ID":"778f3898-c77a-4905-9c88-4f0222c75817","Type":"ContainerDied","Data":"d4e585d8c04d34ad4f94e9d269ca13f9a08aa393622c9cc919a2c7459810db9a"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.756168 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-twj49" event={"ID":"778f3898-c77a-4905-9c88-4f0222c75817","Type":"ContainerStarted","Data":"0b1e863f043d475527603c8bc56b719e9424199254b2b0fb024191228d0b4aff"} Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.792112 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" podStartSLOduration=1.792084743 podStartE2EDuration="1.792084743s" podCreationTimestamp="2026-02-16 13:23:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:58.773793033 +0000 UTC m=+1238.100506981" watchObservedRunningTime="2026-02-16 13:23:58.792084743 +0000 UTC m=+1238.118798471" Feb 16 13:23:58 crc kubenswrapper[4816]: I0216 13:23:58.862328 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" podStartSLOduration=2.86230635 podStartE2EDuration="2.86230635s" podCreationTimestamp="2026-02-16 13:23:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:23:58.858952399 +0000 UTC m=+1238.185666137" watchObservedRunningTime="2026-02-16 13:23:58.86230635 +0000 UTC m=+1238.189020078" Feb 16 13:23:59 crc kubenswrapper[4816]: I0216 13:23:59.769098 4816 generic.go:334] "Generic (PLEG): container finished" podID="049f93f8-ed00-4331-b932-1e4f98ffe8c2" containerID="24d21dd10427dd9ed85ee969b127ac599b8e7432a15c1f720ab6ea7f8f1c0d2b" exitCode=0 Feb 16 13:23:59 crc kubenswrapper[4816]: I0216 13:23:59.769161 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" event={"ID":"049f93f8-ed00-4331-b932-1e4f98ffe8c2","Type":"ContainerDied","Data":"24d21dd10427dd9ed85ee969b127ac599b8e7432a15c1f720ab6ea7f8f1c0d2b"} Feb 16 13:23:59 crc kubenswrapper[4816]: I0216 13:23:59.771902 4816 generic.go:334] "Generic (PLEG): container finished" podID="aeb7d949-8e61-4bfa-8b02-0213a7861b9f" containerID="474929eac836803381bab25d53cae980ee7dbac3ad8d6c4038f390f093bca57e" exitCode=0 Feb 16 13:23:59 crc kubenswrapper[4816]: I0216 13:23:59.771992 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" event={"ID":"aeb7d949-8e61-4bfa-8b02-0213a7861b9f","Type":"ContainerDied","Data":"474929eac836803381bab25d53cae980ee7dbac3ad8d6c4038f390f093bca57e"} Feb 16 13:23:59 crc kubenswrapper[4816]: I0216 13:23:59.777768 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerStarted","Data":"ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab"} Feb 16 13:23:59 crc kubenswrapper[4816]: I0216 13:23:59.784151 4816 generic.go:334] "Generic (PLEG): container finished" podID="c13d2f68-a630-4bfc-a909-0c83418e55bc" containerID="96a26d4a869ea29f2d4c6578c22ea996c7dcf18bdf02530d972d1e05dfd28823" exitCode=0 Feb 16 13:23:59 crc kubenswrapper[4816]: I0216 13:23:59.786025 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-z8cp5" event={"ID":"c13d2f68-a630-4bfc-a909-0c83418e55bc","Type":"ContainerDied","Data":"96a26d4a869ea29f2d4c6578c22ea996c7dcf18bdf02530d972d1e05dfd28823"} Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.424096 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.429399 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.440020 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-twj49" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.470463 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.539957 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bqhp\" (UniqueName: \"kubernetes.io/projected/778f3898-c77a-4905-9c88-4f0222c75817-kube-api-access-2bqhp\") pod \"778f3898-c77a-4905-9c88-4f0222c75817\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.540098 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3898-c77a-4905-9c88-4f0222c75817-operator-scripts\") pod \"778f3898-c77a-4905-9c88-4f0222c75817\" (UID: \"778f3898-c77a-4905-9c88-4f0222c75817\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.540116 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8tbh\" (UniqueName: \"kubernetes.io/projected/c13d2f68-a630-4bfc-a909-0c83418e55bc-kube-api-access-h8tbh\") pod \"c13d2f68-a630-4bfc-a909-0c83418e55bc\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.540209 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c13d2f68-a630-4bfc-a909-0c83418e55bc-operator-scripts\") pod \"c13d2f68-a630-4bfc-a909-0c83418e55bc\" (UID: \"c13d2f68-a630-4bfc-a909-0c83418e55bc\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.540267 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz4sx\" (UniqueName: 
\"kubernetes.io/projected/119a94e7-cb72-4388-b3f0-78d2de19889f-kube-api-access-nz4sx\") pod \"119a94e7-cb72-4388-b3f0-78d2de19889f\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.540313 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/119a94e7-cb72-4388-b3f0-78d2de19889f-operator-scripts\") pod \"119a94e7-cb72-4388-b3f0-78d2de19889f\" (UID: \"119a94e7-cb72-4388-b3f0-78d2de19889f\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.541038 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/119a94e7-cb72-4388-b3f0-78d2de19889f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "119a94e7-cb72-4388-b3f0-78d2de19889f" (UID: "119a94e7-cb72-4388-b3f0-78d2de19889f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.541389 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c13d2f68-a630-4bfc-a909-0c83418e55bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c13d2f68-a630-4bfc-a909-0c83418e55bc" (UID: "c13d2f68-a630-4bfc-a909-0c83418e55bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.543114 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/778f3898-c77a-4905-9c88-4f0222c75817-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "778f3898-c77a-4905-9c88-4f0222c75817" (UID: "778f3898-c77a-4905-9c88-4f0222c75817"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.545960 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c13d2f68-a630-4bfc-a909-0c83418e55bc-kube-api-access-h8tbh" (OuterVolumeSpecName: "kube-api-access-h8tbh") pod "c13d2f68-a630-4bfc-a909-0c83418e55bc" (UID: "c13d2f68-a630-4bfc-a909-0c83418e55bc"). InnerVolumeSpecName "kube-api-access-h8tbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.546263 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/778f3898-c77a-4905-9c88-4f0222c75817-kube-api-access-2bqhp" (OuterVolumeSpecName: "kube-api-access-2bqhp") pod "778f3898-c77a-4905-9c88-4f0222c75817" (UID: "778f3898-c77a-4905-9c88-4f0222c75817"). InnerVolumeSpecName "kube-api-access-2bqhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.560973 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/119a94e7-cb72-4388-b3f0-78d2de19889f-kube-api-access-nz4sx" (OuterVolumeSpecName: "kube-api-access-nz4sx") pod "119a94e7-cb72-4388-b3f0-78d2de19889f" (UID: "119a94e7-cb72-4388-b3f0-78d2de19889f"). InnerVolumeSpecName "kube-api-access-nz4sx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.642421 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f64f8bcc-d25f-4799-b916-7604027ba614-operator-scripts\") pod \"f64f8bcc-d25f-4799-b916-7604027ba614\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.642779 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn6sv\" (UniqueName: \"kubernetes.io/projected/f64f8bcc-d25f-4799-b916-7604027ba614-kube-api-access-qn6sv\") pod \"f64f8bcc-d25f-4799-b916-7604027ba614\" (UID: \"f64f8bcc-d25f-4799-b916-7604027ba614\") " Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.642788 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f64f8bcc-d25f-4799-b916-7604027ba614-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f64f8bcc-d25f-4799-b916-7604027ba614" (UID: "f64f8bcc-d25f-4799-b916-7604027ba614"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.643392 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c13d2f68-a630-4bfc-a909-0c83418e55bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.643411 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f64f8bcc-d25f-4799-b916-7604027ba614-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.643421 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz4sx\" (UniqueName: \"kubernetes.io/projected/119a94e7-cb72-4388-b3f0-78d2de19889f-kube-api-access-nz4sx\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.643431 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/119a94e7-cb72-4388-b3f0-78d2de19889f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.643442 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bqhp\" (UniqueName: \"kubernetes.io/projected/778f3898-c77a-4905-9c88-4f0222c75817-kube-api-access-2bqhp\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.643451 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/778f3898-c77a-4905-9c88-4f0222c75817-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.643459 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8tbh\" (UniqueName: \"kubernetes.io/projected/c13d2f68-a630-4bfc-a909-0c83418e55bc-kube-api-access-h8tbh\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.646149 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f64f8bcc-d25f-4799-b916-7604027ba614-kube-api-access-qn6sv" (OuterVolumeSpecName: "kube-api-access-qn6sv") pod "f64f8bcc-d25f-4799-b916-7604027ba614" (UID: "f64f8bcc-d25f-4799-b916-7604027ba614"). 
InnerVolumeSpecName "kube-api-access-qn6sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.745537 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn6sv\" (UniqueName: \"kubernetes.io/projected/f64f8bcc-d25f-4799-b916-7604027ba614-kube-api-access-qn6sv\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.795280 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z4v4v" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.795273 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z4v4v" event={"ID":"119a94e7-cb72-4388-b3f0-78d2de19889f","Type":"ContainerDied","Data":"70df48aa4f167e9cdbc4ae67f30240ca0813136358d27ecc5a6399cf59db7088"} Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.795771 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70df48aa4f167e9cdbc4ae67f30240ca0813136358d27ecc5a6399cf59db7088" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.797573 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1245-account-create-update-tbtlc" event={"ID":"f64f8bcc-d25f-4799-b916-7604027ba614","Type":"ContainerDied","Data":"837126632a601b8ee29c1da7d607ddefb258cdc0a8c74b23f71e5a342274b60b"} Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.797609 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="837126632a601b8ee29c1da7d607ddefb258cdc0a8c74b23f71e5a342274b60b" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.797622 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1245-account-create-update-tbtlc" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.799238 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-z8cp5" event={"ID":"c13d2f68-a630-4bfc-a909-0c83418e55bc","Type":"ContainerDied","Data":"d1705c2ab1ffce5f9a17cd8021955051e6e0eb3f87e607a9d1fcc8369516b25a"} Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.799279 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1705c2ab1ffce5f9a17cd8021955051e6e0eb3f87e607a9d1fcc8369516b25a" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.799247 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-z8cp5" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.802244 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-twj49" event={"ID":"778f3898-c77a-4905-9c88-4f0222c75817","Type":"ContainerDied","Data":"0b1e863f043d475527603c8bc56b719e9424199254b2b0fb024191228d0b4aff"} Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.802291 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b1e863f043d475527603c8bc56b719e9424199254b2b0fb024191228d0b4aff" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.802347 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-twj49" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.806079 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerStarted","Data":"8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f"} Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.806236 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-central-agent" containerID="cri-o://661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01" gracePeriod=30 Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.806358 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="proxy-httpd" containerID="cri-o://8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f" gracePeriod=30 Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.806409 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-notification-agent" containerID="cri-o://60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84" gracePeriod=30 Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.806506 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.806421 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="sg-core" containerID="cri-o://ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab" gracePeriod=30 Feb 16 13:24:00 crc kubenswrapper[4816]: I0216 13:24:00.859135 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=7.919775049 podStartE2EDuration="12.859112634s" podCreationTimestamp="2026-02-16 13:23:48 +0000 UTC" firstStartedPulling="2026-02-16 13:23:55.245394421 +0000 UTC m=+1234.572108149" lastFinishedPulling="2026-02-16 13:24:00.184732006 +0000 UTC m=+1239.511445734" observedRunningTime="2026-02-16 13:24:00.842708656 +0000 UTC m=+1240.169422384" watchObservedRunningTime="2026-02-16 13:24:00.859112634 +0000 UTC m=+1240.185826362" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.095344 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.253753 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/049f93f8-ed00-4331-b932-1e4f98ffe8c2-operator-scripts\") pod \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.254154 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ns5nj\" (UniqueName: \"kubernetes.io/projected/049f93f8-ed00-4331-b932-1e4f98ffe8c2-kube-api-access-ns5nj\") pod \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\" (UID: \"049f93f8-ed00-4331-b932-1e4f98ffe8c2\") " Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.254743 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/049f93f8-ed00-4331-b932-1e4f98ffe8c2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "049f93f8-ed00-4331-b932-1e4f98ffe8c2" (UID: "049f93f8-ed00-4331-b932-1e4f98ffe8c2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.260956 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/049f93f8-ed00-4331-b932-1e4f98ffe8c2-kube-api-access-ns5nj" (OuterVolumeSpecName: "kube-api-access-ns5nj") pod "049f93f8-ed00-4331-b932-1e4f98ffe8c2" (UID: "049f93f8-ed00-4331-b932-1e4f98ffe8c2"). InnerVolumeSpecName "kube-api-access-ns5nj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.315333 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.503384 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ns5nj\" (UniqueName: \"kubernetes.io/projected/049f93f8-ed00-4331-b932-1e4f98ffe8c2-kube-api-access-ns5nj\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.503458 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/049f93f8-ed00-4331-b932-1e4f98ffe8c2-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.606538 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-operator-scripts\") pod \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.606607 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcqfd\" (UniqueName: \"kubernetes.io/projected/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-kube-api-access-tcqfd\") pod \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\" (UID: \"aeb7d949-8e61-4bfa-8b02-0213a7861b9f\") " Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.607109 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aeb7d949-8e61-4bfa-8b02-0213a7861b9f" (UID: "aeb7d949-8e61-4bfa-8b02-0213a7861b9f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.627394 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-kube-api-access-tcqfd" (OuterVolumeSpecName: "kube-api-access-tcqfd") pod "aeb7d949-8e61-4bfa-8b02-0213a7861b9f" (UID: "aeb7d949-8e61-4bfa-8b02-0213a7861b9f"). InnerVolumeSpecName "kube-api-access-tcqfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.708864 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.708897 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcqfd\" (UniqueName: \"kubernetes.io/projected/aeb7d949-8e61-4bfa-8b02-0213a7861b9f-kube-api-access-tcqfd\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.818651 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" event={"ID":"049f93f8-ed00-4331-b932-1e4f98ffe8c2","Type":"ContainerDied","Data":"29c2c7de7081a84214c9b9ea1d7694d4ac648f44c9e49fffa8c75ce0b678b71a"} Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.818716 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29c2c7de7081a84214c9b9ea1d7694d4ac648f44c9e49fffa8c75ce0b678b71a" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.818775 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-blhnm" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.822036 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" event={"ID":"aeb7d949-8e61-4bfa-8b02-0213a7861b9f","Type":"ContainerDied","Data":"5bc5f598811cafeb6fe28af7d0a20623e601986f53c326dd4323507552096b94"} Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.822070 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bc5f598811cafeb6fe28af7d0a20623e601986f53c326dd4323507552096b94" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.822116 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-5ljmf" Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.829435 4816 generic.go:334] "Generic (PLEG): container finished" podID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerID="8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f" exitCode=0 Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.829472 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerDied","Data":"8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f"} Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.829515 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerDied","Data":"ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab"} Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.829479 4816 generic.go:334] "Generic (PLEG): container finished" podID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerID="ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab" exitCode=2 Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.829545 4816 generic.go:334] "Generic (PLEG): container finished" podID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerID="60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84" exitCode=0 Feb 16 13:24:01 crc kubenswrapper[4816]: I0216 13:24:01.829562 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerDied","Data":"60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84"} Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.647930 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.776721 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-config-data\") pod \"f39547de-3b98-4e11-9ef8-2e3744b82e23\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.776816 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dk268\" (UniqueName: \"kubernetes.io/projected/f39547de-3b98-4e11-9ef8-2e3744b82e23-kube-api-access-dk268\") pod \"f39547de-3b98-4e11-9ef8-2e3744b82e23\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.776866 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-log-httpd\") pod \"f39547de-3b98-4e11-9ef8-2e3744b82e23\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.776895 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-run-httpd\") pod \"f39547de-3b98-4e11-9ef8-2e3744b82e23\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.776982 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-sg-core-conf-yaml\") pod \"f39547de-3b98-4e11-9ef8-2e3744b82e23\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.777795 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f39547de-3b98-4e11-9ef8-2e3744b82e23" (UID: "f39547de-3b98-4e11-9ef8-2e3744b82e23"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.777893 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-combined-ca-bundle\") pod \"f39547de-3b98-4e11-9ef8-2e3744b82e23\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.778155 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-scripts\") pod \"f39547de-3b98-4e11-9ef8-2e3744b82e23\" (UID: \"f39547de-3b98-4e11-9ef8-2e3744b82e23\") " Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.778055 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f39547de-3b98-4e11-9ef8-2e3744b82e23" (UID: "f39547de-3b98-4e11-9ef8-2e3744b82e23"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.778744 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.778767 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39547de-3b98-4e11-9ef8-2e3744b82e23-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.782111 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f39547de-3b98-4e11-9ef8-2e3744b82e23-kube-api-access-dk268" (OuterVolumeSpecName: "kube-api-access-dk268") pod "f39547de-3b98-4e11-9ef8-2e3744b82e23" (UID: "f39547de-3b98-4e11-9ef8-2e3744b82e23"). InnerVolumeSpecName "kube-api-access-dk268". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.782710 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-scripts" (OuterVolumeSpecName: "scripts") pod "f39547de-3b98-4e11-9ef8-2e3744b82e23" (UID: "f39547de-3b98-4e11-9ef8-2e3744b82e23"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.810832 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f39547de-3b98-4e11-9ef8-2e3744b82e23" (UID: "f39547de-3b98-4e11-9ef8-2e3744b82e23"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.874837 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f39547de-3b98-4e11-9ef8-2e3744b82e23" (UID: "f39547de-3b98-4e11-9ef8-2e3744b82e23"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.880448 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.880484 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.880496 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dk268\" (UniqueName: \"kubernetes.io/projected/f39547de-3b98-4e11-9ef8-2e3744b82e23-kube-api-access-dk268\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.880510 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.893562 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-config-data" (OuterVolumeSpecName: "config-data") pod "f39547de-3b98-4e11-9ef8-2e3744b82e23" (UID: "f39547de-3b98-4e11-9ef8-2e3744b82e23"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.905570 4816 generic.go:334] "Generic (PLEG): container finished" podID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerID="661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01" exitCode=0 Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.905629 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.905650 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerDied","Data":"661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01"} Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.906016 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39547de-3b98-4e11-9ef8-2e3744b82e23","Type":"ContainerDied","Data":"840ee6dd2cbe2bf959925ff45085f1364c463b72cdfa7785356d4ecd9c5a38d2"} Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.906040 4816 scope.go:117] "RemoveContainer" containerID="8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.925044 4816 scope.go:117] "RemoveContainer" containerID="ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.953050 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.963110 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.970444 4816 scope.go:117] "RemoveContainer" containerID="60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974247 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974762 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778f3898-c77a-4905-9c88-4f0222c75817" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974789 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="778f3898-c77a-4905-9c88-4f0222c75817" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974812 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64f8bcc-d25f-4799-b916-7604027ba614" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974822 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64f8bcc-d25f-4799-b916-7604027ba614" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974843 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb7d949-8e61-4bfa-8b02-0213a7861b9f" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974851 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb7d949-8e61-4bfa-8b02-0213a7861b9f" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974864 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="119a94e7-cb72-4388-b3f0-78d2de19889f" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974871 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="119a94e7-cb72-4388-b3f0-78d2de19889f" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974882 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="049f93f8-ed00-4331-b932-1e4f98ffe8c2" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc 
kubenswrapper[4816]: I0216 13:24:05.974890 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="049f93f8-ed00-4331-b932-1e4f98ffe8c2" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974910 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-central-agent" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974918 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-central-agent" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974933 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="sg-core" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974942 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="sg-core" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974953 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-notification-agent" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974958 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-notification-agent" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.974979 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="proxy-httpd" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.974991 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="proxy-httpd" Feb 16 13:24:05 crc kubenswrapper[4816]: E0216 13:24:05.975009 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c13d2f68-a630-4bfc-a909-0c83418e55bc" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975017 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c13d2f68-a630-4bfc-a909-0c83418e55bc" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975206 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-central-agent" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975224 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="proxy-httpd" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975242 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="778f3898-c77a-4905-9c88-4f0222c75817" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975251 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeb7d949-8e61-4bfa-8b02-0213a7861b9f" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975260 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="sg-core" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975275 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c13d2f68-a630-4bfc-a909-0c83418e55bc" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975285 4816 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="119a94e7-cb72-4388-b3f0-78d2de19889f" containerName="mariadb-database-create" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975296 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64f8bcc-d25f-4799-b916-7604027ba614" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975309 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" containerName="ceilometer-notification-agent" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.975325 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="049f93f8-ed00-4331-b932-1e4f98ffe8c2" containerName="mariadb-account-create-update" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.977277 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984245 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjw2c\" (UniqueName: \"kubernetes.io/projected/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-kube-api-access-fjw2c\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984349 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-config-data\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984413 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-log-httpd\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984473 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984494 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-run-httpd\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984530 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-scripts\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984554 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 
13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.984697 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39547de-3b98-4e11-9ef8-2e3744b82e23-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.985306 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.985534 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:24:05 crc kubenswrapper[4816]: I0216 13:24:05.990838 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.019895 4816 scope.go:117] "RemoveContainer" containerID="661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.043939 4816 scope.go:117] "RemoveContainer" containerID="8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f" Feb 16 13:24:06 crc kubenswrapper[4816]: E0216 13:24:06.044526 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f\": container with ID starting with 8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f not found: ID does not exist" containerID="8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.044575 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f"} err="failed to get container status \"8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f\": rpc error: code = NotFound desc = could not find container \"8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f\": container with ID starting with 8c45a00692adc0c790ca2f6fe4b819985b70db2973722ca0c1197fbb44684f7f not found: ID does not exist" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.044602 4816 scope.go:117] "RemoveContainer" containerID="ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab" Feb 16 13:24:06 crc kubenswrapper[4816]: E0216 13:24:06.045018 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab\": container with ID starting with ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab not found: ID does not exist" containerID="ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.045041 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab"} err="failed to get container status \"ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab\": rpc error: code = NotFound desc = could not find container \"ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab\": container with ID starting with ee960b74e5090ba35d38a65f5cb007b62c848605b63e091b32161cb871bc56ab not found: ID does not exist" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.045057 4816 scope.go:117] "RemoveContainer" 
containerID="60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84" Feb 16 13:24:06 crc kubenswrapper[4816]: E0216 13:24:06.045440 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84\": container with ID starting with 60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84 not found: ID does not exist" containerID="60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.045464 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84"} err="failed to get container status \"60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84\": rpc error: code = NotFound desc = could not find container \"60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84\": container with ID starting with 60573ab2dc2f8b3c1b6c95d4077a4882d8d58005cb09cddd32f62e03edd14c84 not found: ID does not exist" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.045482 4816 scope.go:117] "RemoveContainer" containerID="661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01" Feb 16 13:24:06 crc kubenswrapper[4816]: E0216 13:24:06.045791 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01\": container with ID starting with 661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01 not found: ID does not exist" containerID="661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.045830 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01"} err="failed to get container status \"661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01\": rpc error: code = NotFound desc = could not find container \"661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01\": container with ID starting with 661d570fbc7360baea044989fc519c18d77f199627e402aea89e4cc0f0e12d01 not found: ID does not exist" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.086577 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjw2c\" (UniqueName: \"kubernetes.io/projected/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-kube-api-access-fjw2c\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.086704 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-config-data\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.086814 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-log-httpd\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.086856 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.086892 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-run-httpd\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.086918 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-scripts\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.086935 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.092478 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.092871 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-run-httpd\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.094953 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-log-httpd\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.097002 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.097840 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-config-data\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.105149 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-scripts\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.111374 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjw2c\" (UniqueName: 
\"kubernetes.io/projected/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-kube-api-access-fjw2c\") pod \"ceilometer-0\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.309368 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.773399 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.916034 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerStarted","Data":"875138ea00c73d86b89f425dd6909c8a071ac4133a400f6a329a9c36b01d1ea5"} Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.940951 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:24:06 crc kubenswrapper[4816]: I0216 13:24:06.941019 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.251804 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-28k78"] Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.253146 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.256351 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.256556 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.256788 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mrtw9" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.262831 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-28k78"] Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.411390 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.411534 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-config-data\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.411577 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kq6j\" (UniqueName: \"kubernetes.io/projected/c300f0d3-72e0-46f6-9910-9dda53a08e13-kube-api-access-7kq6j\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.411685 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-scripts\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.417302 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f39547de-3b98-4e11-9ef8-2e3744b82e23" path="/var/lib/kubelet/pods/f39547de-3b98-4e11-9ef8-2e3744b82e23/volumes" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.514139 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-config-data\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.514220 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kq6j\" (UniqueName: \"kubernetes.io/projected/c300f0d3-72e0-46f6-9910-9dda53a08e13-kube-api-access-7kq6j\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 
13:24:07.514290 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-scripts\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.514499 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.521128 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-scripts\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.521688 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-config-data\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.537504 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.557624 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kq6j\" (UniqueName: \"kubernetes.io/projected/c300f0d3-72e0-46f6-9910-9dda53a08e13-kube-api-access-7kq6j\") pod \"nova-cell0-conductor-db-sync-28k78\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.712874 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.855749 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:07 crc kubenswrapper[4816]: I0216 13:24:07.927743 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerStarted","Data":"c50eaa547650507a4ecdd0885ec295c38b8bf4d68076cb0f13d111ccbb85db8a"} Feb 16 13:24:08 crc kubenswrapper[4816]: W0216 13:24:08.214828 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc300f0d3_72e0_46f6_9910_9dda53a08e13.slice/crio-d1e819b93270b49a3321359b9bdb0b4156c56db6d422d60f2049e4fd4d9e7978 WatchSource:0}: Error finding container d1e819b93270b49a3321359b9bdb0b4156c56db6d422d60f2049e4fd4d9e7978: Status 404 returned error can't find the container with id d1e819b93270b49a3321359b9bdb0b4156c56db6d422d60f2049e4fd4d9e7978 Feb 16 13:24:08 crc kubenswrapper[4816]: I0216 13:24:08.216783 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-28k78"] Feb 16 13:24:08 crc kubenswrapper[4816]: I0216 13:24:08.947487 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerStarted","Data":"abe4135d6fb30592a1a08f5935aa1aa115f9904ecb815acddca5fef165a803f4"} Feb 16 13:24:08 crc kubenswrapper[4816]: I0216 13:24:08.947946 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerStarted","Data":"a7b9cd8ef35d28b3a48a3967c0e6c3ab65707717ef7d5a90138ff6360d7f2df4"} Feb 16 13:24:08 crc kubenswrapper[4816]: I0216 13:24:08.948913 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-28k78" event={"ID":"c300f0d3-72e0-46f6-9910-9dda53a08e13","Type":"ContainerStarted","Data":"d1e819b93270b49a3321359b9bdb0b4156c56db6d422d60f2049e4fd4d9e7978"} Feb 16 13:24:10 crc kubenswrapper[4816]: I0216 13:24:10.973211 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerStarted","Data":"35f135eed29101274a34e4c5db5716034a3b012ea91da92294212695ba67d703"} Feb 16 13:24:10 crc kubenswrapper[4816]: I0216 13:24:10.973510 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-central-agent" containerID="cri-o://c50eaa547650507a4ecdd0885ec295c38b8bf4d68076cb0f13d111ccbb85db8a" gracePeriod=30 Feb 16 13:24:10 crc kubenswrapper[4816]: I0216 13:24:10.973924 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="proxy-httpd" containerID="cri-o://35f135eed29101274a34e4c5db5716034a3b012ea91da92294212695ba67d703" gracePeriod=30 Feb 16 13:24:10 crc kubenswrapper[4816]: I0216 13:24:10.974004 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-notification-agent" containerID="cri-o://a7b9cd8ef35d28b3a48a3967c0e6c3ab65707717ef7d5a90138ff6360d7f2df4" gracePeriod=30 Feb 16 13:24:10 crc kubenswrapper[4816]: 
I0216 13:24:10.974041 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 13:24:10 crc kubenswrapper[4816]: I0216 13:24:10.974063 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="sg-core" containerID="cri-o://abe4135d6fb30592a1a08f5935aa1aa115f9904ecb815acddca5fef165a803f4" gracePeriod=30 Feb 16 13:24:10 crc kubenswrapper[4816]: I0216 13:24:10.998464 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.250015653 podStartE2EDuration="5.998445374s" podCreationTimestamp="2026-02-16 13:24:05 +0000 UTC" firstStartedPulling="2026-02-16 13:24:06.779830752 +0000 UTC m=+1246.106544480" lastFinishedPulling="2026-02-16 13:24:10.528260483 +0000 UTC m=+1249.854974201" observedRunningTime="2026-02-16 13:24:10.994758814 +0000 UTC m=+1250.321472542" watchObservedRunningTime="2026-02-16 13:24:10.998445374 +0000 UTC m=+1250.325159102" Feb 16 13:24:11 crc kubenswrapper[4816]: E0216 13:24:11.115173 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66e2cf4e_1eb6_4def_8410_89bd7ab8785d.slice/crio-conmon-abe4135d6fb30592a1a08f5935aa1aa115f9904ecb815acddca5fef165a803f4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66e2cf4e_1eb6_4def_8410_89bd7ab8785d.slice/crio-35f135eed29101274a34e4c5db5716034a3b012ea91da92294212695ba67d703.scope\": RecentStats: unable to find data in memory cache]" Feb 16 13:24:11 crc kubenswrapper[4816]: I0216 13:24:11.988727 4816 generic.go:334] "Generic (PLEG): container finished" podID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerID="35f135eed29101274a34e4c5db5716034a3b012ea91da92294212695ba67d703" exitCode=0 Feb 16 13:24:11 crc kubenswrapper[4816]: I0216 13:24:11.989187 4816 generic.go:334] "Generic (PLEG): container finished" podID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerID="abe4135d6fb30592a1a08f5935aa1aa115f9904ecb815acddca5fef165a803f4" exitCode=2 Feb 16 13:24:11 crc kubenswrapper[4816]: I0216 13:24:11.989201 4816 generic.go:334] "Generic (PLEG): container finished" podID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerID="a7b9cd8ef35d28b3a48a3967c0e6c3ab65707717ef7d5a90138ff6360d7f2df4" exitCode=0 Feb 16 13:24:11 crc kubenswrapper[4816]: I0216 13:24:11.988952 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerDied","Data":"35f135eed29101274a34e4c5db5716034a3b012ea91da92294212695ba67d703"} Feb 16 13:24:11 crc kubenswrapper[4816]: I0216 13:24:11.989255 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerDied","Data":"abe4135d6fb30592a1a08f5935aa1aa115f9904ecb815acddca5fef165a803f4"} Feb 16 13:24:11 crc kubenswrapper[4816]: I0216 13:24:11.989274 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerDied","Data":"a7b9cd8ef35d28b3a48a3967c0e6c3ab65707717ef7d5a90138ff6360d7f2df4"} Feb 16 13:24:16 crc kubenswrapper[4816]: I0216 13:24:16.960174 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] 
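The exit codes recorded in the entries above and just below follow the usual 128+signal convention for containers: 0 for processes that handle the SIGTERM from the grace-period kill and shut down cleanly, 2 for sg-core's own error exit, and 143 (seen next for glance-log) meaning 128+15, i.e. the process was terminated by SIGTERM before exiting on its own. A small Go sketch decoding such codes; the helper is illustrative, but the convention itself is standard POSIX/shell practice.

package main

import (
	"fmt"
	"syscall"
)

// describe interprets a container exit code using the common
// 128+signal convention applied by shells and container runtimes.
func describe(code int) string {
	switch {
	case code == 0:
		return "clean exit"
	case code > 128 && code < 160:
		sig := syscall.Signal(code - 128)
		return fmt.Sprintf("killed by signal %d (%v)", code-128, sig)
	default:
		return "application-defined error exit"
	}
}

func main() {
	// The three exit codes observed for these pods in the surrounding entries.
	for _, code := range []int{0, 2, 143} {
		fmt.Printf("exitCode=%d: %s\n", code, describe(code))
	}
}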
Feb 16 13:24:16 crc kubenswrapper[4816]: I0216 13:24:16.961017 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-log" containerID="cri-o://1e09a1f04835f07cfd1b685d682d5e82695b2570c518ab84fd219b72b21c6832" gracePeriod=30 Feb 16 13:24:16 crc kubenswrapper[4816]: I0216 13:24:16.961155 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-httpd" containerID="cri-o://4cd7538bae0d430d31985d2458850d36a20537584597e3cc4ca05965d1aef164" gracePeriod=30 Feb 16 13:24:17 crc kubenswrapper[4816]: I0216 13:24:17.188759 4816 generic.go:334] "Generic (PLEG): container finished" podID="3219985d-af01-44c0-9945-075a41b0326c" containerID="1e09a1f04835f07cfd1b685d682d5e82695b2570c518ab84fd219b72b21c6832" exitCode=143 Feb 16 13:24:17 crc kubenswrapper[4816]: I0216 13:24:17.188836 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3219985d-af01-44c0-9945-075a41b0326c","Type":"ContainerDied","Data":"1e09a1f04835f07cfd1b685d682d5e82695b2570c518ab84fd219b72b21c6832"} Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.209854 4816 generic.go:334] "Generic (PLEG): container finished" podID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerID="c50eaa547650507a4ecdd0885ec295c38b8bf4d68076cb0f13d111ccbb85db8a" exitCode=0 Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.209895 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerDied","Data":"c50eaa547650507a4ecdd0885ec295c38b8bf4d68076cb0f13d111ccbb85db8a"} Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.747411 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.928704 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjw2c\" (UniqueName: \"kubernetes.io/projected/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-kube-api-access-fjw2c\") pod \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.929068 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-combined-ca-bundle\") pod \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.929110 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-config-data\") pod \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.929131 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-scripts\") pod \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.929231 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-sg-core-conf-yaml\") pod \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.929318 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-log-httpd\") pod \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.929352 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-run-httpd\") pod \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\" (UID: \"66e2cf4e-1eb6-4def-8410-89bd7ab8785d\") " Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.930175 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "66e2cf4e-1eb6-4def-8410-89bd7ab8785d" (UID: "66e2cf4e-1eb6-4def-8410-89bd7ab8785d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.931183 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "66e2cf4e-1eb6-4def-8410-89bd7ab8785d" (UID: "66e2cf4e-1eb6-4def-8410-89bd7ab8785d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.934363 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-scripts" (OuterVolumeSpecName: "scripts") pod "66e2cf4e-1eb6-4def-8410-89bd7ab8785d" (UID: "66e2cf4e-1eb6-4def-8410-89bd7ab8785d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.936331 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-kube-api-access-fjw2c" (OuterVolumeSpecName: "kube-api-access-fjw2c") pod "66e2cf4e-1eb6-4def-8410-89bd7ab8785d" (UID: "66e2cf4e-1eb6-4def-8410-89bd7ab8785d"). InnerVolumeSpecName "kube-api-access-fjw2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:18 crc kubenswrapper[4816]: I0216 13:24:18.966300 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "66e2cf4e-1eb6-4def-8410-89bd7ab8785d" (UID: "66e2cf4e-1eb6-4def-8410-89bd7ab8785d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.013886 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.014138 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-log" containerID="cri-o://62057e2e47d2b36f712d5caf7f83d928fc945948d0d2f16f1eefee4349c17fca" gracePeriod=30 Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.014375 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-httpd" containerID="cri-o://dd1376ff3c8e54a0019ab33b65e5df0ac628bae3d89bd925953baa92163dded7" gracePeriod=30 Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.031806 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.032023 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.032148 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjw2c\" (UniqueName: \"kubernetes.io/projected/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-kube-api-access-fjw2c\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.032211 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.032270 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-sg-core-conf-yaml\") on 
node \"crc\" DevicePath \"\"" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.041974 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66e2cf4e-1eb6-4def-8410-89bd7ab8785d" (UID: "66e2cf4e-1eb6-4def-8410-89bd7ab8785d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.073730 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-config-data" (OuterVolumeSpecName: "config-data") pod "66e2cf4e-1eb6-4def-8410-89bd7ab8785d" (UID: "66e2cf4e-1eb6-4def-8410-89bd7ab8785d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.133686 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.133925 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2cf4e-1eb6-4def-8410-89bd7ab8785d-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.221468 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-28k78" event={"ID":"c300f0d3-72e0-46f6-9910-9dda53a08e13","Type":"ContainerStarted","Data":"56e385b20d8b4b9b80ca388d17160eb68951aa4e48f004e0a892e63e727fe0b0"} Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.225263 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66e2cf4e-1eb6-4def-8410-89bd7ab8785d","Type":"ContainerDied","Data":"875138ea00c73d86b89f425dd6909c8a071ac4133a400f6a329a9c36b01d1ea5"} Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.225291 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.225323 4816 scope.go:117] "RemoveContainer" containerID="35f135eed29101274a34e4c5db5716034a3b012ea91da92294212695ba67d703" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.227895 4816 generic.go:334] "Generic (PLEG): container finished" podID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerID="62057e2e47d2b36f712d5caf7f83d928fc945948d0d2f16f1eefee4349c17fca" exitCode=143 Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.227946 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5fe0907e-dfe1-4446-bb24-9d2630b7f33b","Type":"ContainerDied","Data":"62057e2e47d2b36f712d5caf7f83d928fc945948d0d2f16f1eefee4349c17fca"} Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.249198 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-28k78" podStartSLOduration=1.773750108 podStartE2EDuration="12.249172286s" podCreationTimestamp="2026-02-16 13:24:07 +0000 UTC" firstStartedPulling="2026-02-16 13:24:08.217050653 +0000 UTC m=+1247.543764391" lastFinishedPulling="2026-02-16 13:24:18.692472841 +0000 UTC m=+1258.019186569" observedRunningTime="2026-02-16 13:24:19.233863498 +0000 UTC m=+1258.560577226" watchObservedRunningTime="2026-02-16 13:24:19.249172286 +0000 UTC m=+1258.575886024" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.258950 4816 scope.go:117] "RemoveContainer" containerID="abe4135d6fb30592a1a08f5935aa1aa115f9904ecb815acddca5fef165a803f4" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.321636 4816 scope.go:117] "RemoveContainer" containerID="a7b9cd8ef35d28b3a48a3967c0e6c3ab65707717ef7d5a90138ff6360d7f2df4" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.329403 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.350494 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.352585 4816 scope.go:117] "RemoveContainer" containerID="c50eaa547650507a4ecdd0885ec295c38b8bf4d68076cb0f13d111ccbb85db8a" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.361634 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:19 crc kubenswrapper[4816]: E0216 13:24:19.362166 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-central-agent" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362189 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-central-agent" Feb 16 13:24:19 crc kubenswrapper[4816]: E0216 13:24:19.362217 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-notification-agent" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362223 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-notification-agent" Feb 16 13:24:19 crc kubenswrapper[4816]: E0216 13:24:19.362235 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="sg-core" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362244 4816 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="sg-core" Feb 16 13:24:19 crc kubenswrapper[4816]: E0216 13:24:19.362257 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="proxy-httpd" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362262 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="proxy-httpd" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362465 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-central-agent" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362485 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="proxy-httpd" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362502 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="sg-core" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.362512 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" containerName="ceilometer-notification-agent" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.364103 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.369078 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.373084 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.373941 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.455372 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66e2cf4e-1eb6-4def-8410-89bd7ab8785d" path="/var/lib/kubelet/pods/66e2cf4e-1eb6-4def-8410-89bd7ab8785d/volumes" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.544213 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-scripts\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.544290 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.544471 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-config-data\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.544517 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.544582 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-log-httpd\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.544644 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-run-httpd\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.544735 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvwrc\" (UniqueName: \"kubernetes.io/projected/e1004fce-1a7a-4783-88af-9147c41d5517-kube-api-access-kvwrc\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.646290 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-log-httpd\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.646347 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-run-httpd\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.646381 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvwrc\" (UniqueName: \"kubernetes.io/projected/e1004fce-1a7a-4783-88af-9147c41d5517-kube-api-access-kvwrc\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.646463 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-scripts\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.646861 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.646947 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-log-httpd\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.647047 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-run-httpd\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.647273 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-config-data\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.647297 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.652170 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.652269 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-config-data\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.652544 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.652963 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-scripts\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.662148 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvwrc\" (UniqueName: \"kubernetes.io/projected/e1004fce-1a7a-4783-88af-9147c41d5517-kube-api-access-kvwrc\") pod \"ceilometer-0\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " pod="openstack/ceilometer-0" Feb 16 13:24:19 crc kubenswrapper[4816]: I0216 13:24:19.748720 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.226101 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.265566 4816 generic.go:334] "Generic (PLEG): container finished" podID="3219985d-af01-44c0-9945-075a41b0326c" containerID="4cd7538bae0d430d31985d2458850d36a20537584597e3cc4ca05965d1aef164" exitCode=0 Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.265645 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3219985d-af01-44c0-9945-075a41b0326c","Type":"ContainerDied","Data":"4cd7538bae0d430d31985d2458850d36a20537584597e3cc4ca05965d1aef164"} Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.703330 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.870442 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-httpd-run\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.870997 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871042 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871128 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-combined-ca-bundle\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871205 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-config-data\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871331 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-public-tls-certs\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871365 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-logs\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871488 4816 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-scripts\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871521 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l5gk\" (UniqueName: \"kubernetes.io/projected/3219985d-af01-44c0-9945-075a41b0326c-kube-api-access-8l5gk\") pod \"3219985d-af01-44c0-9945-075a41b0326c\" (UID: \"3219985d-af01-44c0-9945-075a41b0326c\") " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.871821 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-logs" (OuterVolumeSpecName: "logs") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.872080 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.872100 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3219985d-af01-44c0-9945-075a41b0326c-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.883947 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-scripts" (OuterVolumeSpecName: "scripts") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.884448 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.885483 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3219985d-af01-44c0-9945-075a41b0326c-kube-api-access-8l5gk" (OuterVolumeSpecName: "kube-api-access-8l5gk") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "kube-api-access-8l5gk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.905014 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.934686 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.940858 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-config-data" (OuterVolumeSpecName: "config-data") pod "3219985d-af01-44c0-9945-075a41b0326c" (UID: "3219985d-af01-44c0-9945-075a41b0326c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.974341 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.974386 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.974399 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.974415 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3219985d-af01-44c0-9945-075a41b0326c-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.974430 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l5gk\" (UniqueName: \"kubernetes.io/projected/3219985d-af01-44c0-9945-075a41b0326c-kube-api-access-8l5gk\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.974474 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Feb 16 13:24:20 crc kubenswrapper[4816]: I0216 13:24:20.977333 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.019460 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.075547 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.280314 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.280297 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3219985d-af01-44c0-9945-075a41b0326c","Type":"ContainerDied","Data":"d0e7f94ffa7c61b10eb872f9507bf0baea23a602295a7df6bbc77f84875876a8"} Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.281304 4816 scope.go:117] "RemoveContainer" containerID="4cd7538bae0d430d31985d2458850d36a20537584597e3cc4ca05965d1aef164" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.283044 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerStarted","Data":"6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c"} Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.283094 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerStarted","Data":"dda7bbfd08b173369c2c2d7639337cf4d04ff30d14acf4a80797a025d0fc5ecd"} Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.351817 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.357728 4816 scope.go:117] "RemoveContainer" containerID="1e09a1f04835f07cfd1b685d682d5e82695b2570c518ab84fd219b72b21c6832" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.535495 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.535821 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:24:21 crc kubenswrapper[4816]: E0216 13:24:21.536125 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-log" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.536141 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-log" Feb 16 13:24:21 crc kubenswrapper[4816]: E0216 13:24:21.536160 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-httpd" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.536166 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-httpd" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.536366 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-httpd" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.536380 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3219985d-af01-44c0-9945-075a41b0326c" containerName="glance-log" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.537273 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.537362 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.542805 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.543049 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 16 13:24:21 crc kubenswrapper[4816]: E0216 13:24:21.569296 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3219985d_af01_44c0_9945_075a41b0326c.slice/crio-d0e7f94ffa7c61b10eb872f9507bf0baea23a602295a7df6bbc77f84875876a8\": RecentStats: unable to find data in memory cache]" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.699701 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.699752 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-config-data\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.699804 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.699848 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.700251 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-scripts\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.700279 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-logs\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.700374 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.700476 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98v5j\" (UniqueName: \"kubernetes.io/projected/1940a629-51c3-4dca-a26d-02080dabbd68-kube-api-access-98v5j\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802530 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802602 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802683 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-scripts\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802709 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-logs\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802749 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802793 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98v5j\" (UniqueName: \"kubernetes.io/projected/1940a629-51c3-4dca-a26d-02080dabbd68-kube-api-access-98v5j\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802872 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802914 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-config-data\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " 
pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.802988 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.804070 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.804241 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-logs\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.814112 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-config-data\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.819261 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.819376 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-scripts\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.822206 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.827373 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98v5j\" (UniqueName: \"kubernetes.io/projected/1940a629-51c3-4dca-a26d-02080dabbd68-kube-api-access-98v5j\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.845253 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " pod="openstack/glance-default-external-api-0" Feb 16 13:24:21 crc kubenswrapper[4816]: I0216 13:24:21.899829 4816 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.201733 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.145:9292/healthcheck\": read tcp 10.217.0.2:37200->10.217.0.145:9292: read: connection reset by peer" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.203370 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.145:9292/healthcheck\": read tcp 10.217.0.2:37198->10.217.0.145:9292: read: connection reset by peer" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.298619 4816 generic.go:334] "Generic (PLEG): container finished" podID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerID="dd1376ff3c8e54a0019ab33b65e5df0ac628bae3d89bd925953baa92163dded7" exitCode=0 Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.298761 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5fe0907e-dfe1-4446-bb24-9d2630b7f33b","Type":"ContainerDied","Data":"dd1376ff3c8e54a0019ab33b65e5df0ac628bae3d89bd925953baa92163dded7"} Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.301466 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerStarted","Data":"9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40"} Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.521817 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:24:22 crc kubenswrapper[4816]: W0216 13:24:22.532171 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1940a629_51c3_4dca_a26d_02080dabbd68.slice/crio-dcf24c6ef294d5b0a3ea62a0e377b80d57f49354a4c6688afb006f072591e528 WatchSource:0}: Error finding container dcf24c6ef294d5b0a3ea62a0e377b80d57f49354a4c6688afb006f072591e528: Status 404 returned error can't find the container with id dcf24c6ef294d5b0a3ea62a0e377b80d57f49354a4c6688afb006f072591e528 Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.758567 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.956956 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-scripts\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.957475 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-config-data\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.957637 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgjs8\" (UniqueName: \"kubernetes.io/projected/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-kube-api-access-fgjs8\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.957711 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-httpd-run\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.957882 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-combined-ca-bundle\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.958167 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-internal-tls-certs\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.958312 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.958381 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-logs\") pod \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\" (UID: \"5fe0907e-dfe1-4446-bb24-9d2630b7f33b\") " Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.959253 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-logs" (OuterVolumeSpecName: "logs") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.959433 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.973131 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-scripts" (OuterVolumeSpecName: "scripts") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.973195 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-kube-api-access-fgjs8" (OuterVolumeSpecName: "kube-api-access-fgjs8") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "kube-api-access-fgjs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:22 crc kubenswrapper[4816]: I0216 13:24:22.981689 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.017931 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.061795 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.061844 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.061854 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.061865 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgjs8\" (UniqueName: \"kubernetes.io/projected/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-kube-api-access-fgjs8\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.061879 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.061888 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.062627 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.091939 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.104128 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-config-data" (OuterVolumeSpecName: "config-data") pod "5fe0907e-dfe1-4446-bb24-9d2630b7f33b" (UID: "5fe0907e-dfe1-4446-bb24-9d2630b7f33b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.164812 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.164879 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fe0907e-dfe1-4446-bb24-9d2630b7f33b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.164897 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.317144 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerStarted","Data":"b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742"} Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.318759 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1940a629-51c3-4dca-a26d-02080dabbd68","Type":"ContainerStarted","Data":"dcf24c6ef294d5b0a3ea62a0e377b80d57f49354a4c6688afb006f072591e528"} Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.321031 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5fe0907e-dfe1-4446-bb24-9d2630b7f33b","Type":"ContainerDied","Data":"1bbc0ee5c1cc4dbc9efffd30c3c7266ed044d836c864cfe6f934cff698646a82"} Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.321067 4816 scope.go:117] "RemoveContainer" containerID="dd1376ff3c8e54a0019ab33b65e5df0ac628bae3d89bd925953baa92163dded7" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.321166 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.367182 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.370053 4816 scope.go:117] "RemoveContainer" containerID="62057e2e47d2b36f712d5caf7f83d928fc945948d0d2f16f1eefee4349c17fca" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.378203 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.410738 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3219985d-af01-44c0-9945-075a41b0326c" path="/var/lib/kubelet/pods/3219985d-af01-44c0-9945-075a41b0326c/volumes" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.412265 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" path="/var/lib/kubelet/pods/5fe0907e-dfe1-4446-bb24-9d2630b7f33b/volumes" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.413156 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:24:23 crc kubenswrapper[4816]: E0216 13:24:23.413444 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-log" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.413462 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-log" Feb 16 13:24:23 crc kubenswrapper[4816]: E0216 13:24:23.413487 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-httpd" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.413494 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-httpd" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.414305 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-log" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.414328 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fe0907e-dfe1-4446-bb24-9d2630b7f33b" containerName="glance-httpd" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.415236 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.419543 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.423538 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.428287 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.571917 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.572235 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.572291 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-logs\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.572370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.572402 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.572444 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.572458 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.572479 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-h55cd\" (UniqueName: \"kubernetes.io/projected/6c7838cc-1729-405a-af1e-ad0f1b9884f7-kube-api-access-h55cd\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675034 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675072 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-logs\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675118 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675144 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675223 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675239 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675257 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h55cd\" (UniqueName: \"kubernetes.io/projected/6c7838cc-1729-405a-af1e-ad0f1b9884f7-kube-api-access-h55cd\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.675330 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.686557 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.686826 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-logs\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.687131 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.697701 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.714866 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.715652 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.725257 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.748001 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h55cd\" (UniqueName: \"kubernetes.io/projected/6c7838cc-1729-405a-af1e-ad0f1b9884f7-kube-api-access-h55cd\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:23 crc kubenswrapper[4816]: I0216 13:24:23.848182 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " pod="openstack/glance-default-internal-api-0" Feb 16 13:24:24 crc kubenswrapper[4816]: I0216 13:24:24.035736 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:24 crc kubenswrapper[4816]: I0216 13:24:24.338309 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1940a629-51c3-4dca-a26d-02080dabbd68","Type":"ContainerStarted","Data":"b6ca05adab300fd9ccf2880dc760585e09aaf0866632bf3a934d424a6c6e0afc"} Feb 16 13:24:24 crc kubenswrapper[4816]: I0216 13:24:24.681934 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:24:24 crc kubenswrapper[4816]: W0216 13:24:24.699096 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c7838cc_1729_405a_af1e_ad0f1b9884f7.slice/crio-639ca0f5b315128b6710b73b225ce1d866d10ac9c9fcfbe69a22b955a1a3f8ff WatchSource:0}: Error finding container 639ca0f5b315128b6710b73b225ce1d866d10ac9c9fcfbe69a22b955a1a3f8ff: Status 404 returned error can't find the container with id 639ca0f5b315128b6710b73b225ce1d866d10ac9c9fcfbe69a22b955a1a3f8ff Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.350104 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerStarted","Data":"012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74"} Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.350578 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.350318 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="sg-core" containerID="cri-o://b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742" gracePeriod=30 Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.350359 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="proxy-httpd" containerID="cri-o://012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74" gracePeriod=30 Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.350375 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-notification-agent" containerID="cri-o://9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40" gracePeriod=30 Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.350177 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-central-agent" containerID="cri-o://6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c" gracePeriod=30 Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.354672 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1940a629-51c3-4dca-a26d-02080dabbd68","Type":"ContainerStarted","Data":"b594b400e21605362a39b0644bd2c43537ea857aedc6e60fe673ee3964203cf8"} Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.360462 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"6c7838cc-1729-405a-af1e-ad0f1b9884f7","Type":"ContainerStarted","Data":"639ca0f5b315128b6710b73b225ce1d866d10ac9c9fcfbe69a22b955a1a3f8ff"} Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.386688 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.029352813 podStartE2EDuration="6.386673344s" podCreationTimestamp="2026-02-16 13:24:19 +0000 UTC" firstStartedPulling="2026-02-16 13:24:20.243554693 +0000 UTC m=+1259.570268421" lastFinishedPulling="2026-02-16 13:24:24.600875224 +0000 UTC m=+1263.927588952" observedRunningTime="2026-02-16 13:24:25.380165597 +0000 UTC m=+1264.706879325" watchObservedRunningTime="2026-02-16 13:24:25.386673344 +0000 UTC m=+1264.713387062" Feb 16 13:24:25 crc kubenswrapper[4816]: I0216 13:24:25.438152 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.438131279 podStartE2EDuration="4.438131279s" podCreationTimestamp="2026-02-16 13:24:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:25.417312881 +0000 UTC m=+1264.744026619" watchObservedRunningTime="2026-02-16 13:24:25.438131279 +0000 UTC m=+1264.764845007" Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.371419 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6c7838cc-1729-405a-af1e-ad0f1b9884f7","Type":"ContainerStarted","Data":"527ce40525cb7c9b030d6afba0202147d041220b639540ecbe06dabb3e1425e2"} Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.371786 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6c7838cc-1729-405a-af1e-ad0f1b9884f7","Type":"ContainerStarted","Data":"be17341e9a947a7465a0a48dedaaad75abdb811c1f90696013b655b457845fe7"} Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.376209 4816 generic.go:334] "Generic (PLEG): container finished" podID="e1004fce-1a7a-4783-88af-9147c41d5517" containerID="012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74" exitCode=0 Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.376239 4816 generic.go:334] "Generic (PLEG): container finished" podID="e1004fce-1a7a-4783-88af-9147c41d5517" containerID="b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742" exitCode=2 Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.376246 4816 generic.go:334] "Generic (PLEG): container finished" podID="e1004fce-1a7a-4783-88af-9147c41d5517" containerID="9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40" exitCode=0 Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.376279 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerDied","Data":"012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74"} Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.376323 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerDied","Data":"b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742"} Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.376334 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerDied","Data":"9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40"} Feb 16 13:24:26 crc kubenswrapper[4816]: I0216 13:24:26.396925 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.3969066039999998 podStartE2EDuration="3.396906604s" podCreationTimestamp="2026-02-16 13:24:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:26.38831784 +0000 UTC m=+1265.715031558" watchObservedRunningTime="2026-02-16 13:24:26.396906604 +0000 UTC m=+1265.723620332" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.149211 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.197058 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvwrc\" (UniqueName: \"kubernetes.io/projected/e1004fce-1a7a-4783-88af-9147c41d5517-kube-api-access-kvwrc\") pod \"e1004fce-1a7a-4783-88af-9147c41d5517\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.197184 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-log-httpd\") pod \"e1004fce-1a7a-4783-88af-9147c41d5517\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.197383 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-sg-core-conf-yaml\") pod \"e1004fce-1a7a-4783-88af-9147c41d5517\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.197503 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-run-httpd\") pod \"e1004fce-1a7a-4783-88af-9147c41d5517\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.197574 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-config-data\") pod \"e1004fce-1a7a-4783-88af-9147c41d5517\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.197674 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-combined-ca-bundle\") pod \"e1004fce-1a7a-4783-88af-9147c41d5517\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.197752 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-scripts\") pod \"e1004fce-1a7a-4783-88af-9147c41d5517\" (UID: \"e1004fce-1a7a-4783-88af-9147c41d5517\") " Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.199854 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e1004fce-1a7a-4783-88af-9147c41d5517" (UID: "e1004fce-1a7a-4783-88af-9147c41d5517"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.200230 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e1004fce-1a7a-4783-88af-9147c41d5517" (UID: "e1004fce-1a7a-4783-88af-9147c41d5517"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.207127 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-scripts" (OuterVolumeSpecName: "scripts") pod "e1004fce-1a7a-4783-88af-9147c41d5517" (UID: "e1004fce-1a7a-4783-88af-9147c41d5517"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.209009 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1004fce-1a7a-4783-88af-9147c41d5517-kube-api-access-kvwrc" (OuterVolumeSpecName: "kube-api-access-kvwrc") pod "e1004fce-1a7a-4783-88af-9147c41d5517" (UID: "e1004fce-1a7a-4783-88af-9147c41d5517"). InnerVolumeSpecName "kube-api-access-kvwrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.258170 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e1004fce-1a7a-4783-88af-9147c41d5517" (UID: "e1004fce-1a7a-4783-88af-9147c41d5517"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.282362 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1004fce-1a7a-4783-88af-9147c41d5517" (UID: "e1004fce-1a7a-4783-88af-9147c41d5517"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.299749 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.300050 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.300114 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.300194 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvwrc\" (UniqueName: \"kubernetes.io/projected/e1004fce-1a7a-4783-88af-9147c41d5517-kube-api-access-kvwrc\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.300255 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1004fce-1a7a-4783-88af-9147c41d5517-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.300321 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.306549 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-config-data" (OuterVolumeSpecName: "config-data") pod "e1004fce-1a7a-4783-88af-9147c41d5517" (UID: "e1004fce-1a7a-4783-88af-9147c41d5517"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.402708 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1004fce-1a7a-4783-88af-9147c41d5517-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.436254 4816 generic.go:334] "Generic (PLEG): container finished" podID="e1004fce-1a7a-4783-88af-9147c41d5517" containerID="6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c" exitCode=0 Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.436340 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerDied","Data":"6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c"} Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.436756 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1004fce-1a7a-4783-88af-9147c41d5517","Type":"ContainerDied","Data":"dda7bbfd08b173369c2c2d7639337cf4d04ff30d14acf4a80797a025d0fc5ecd"} Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.436790 4816 scope.go:117] "RemoveContainer" containerID="012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.436458 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.482136 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.482452 4816 scope.go:117] "RemoveContainer" containerID="b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.496643 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.512719 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.513094 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="proxy-httpd" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513108 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="proxy-httpd" Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.513138 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-central-agent" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513249 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-central-agent" Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.513265 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-notification-agent" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513271 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-notification-agent" Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.513283 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="sg-core" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513289 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="sg-core" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513440 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="sg-core" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513455 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-central-agent" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513468 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="ceilometer-notification-agent" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.513482 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" containerName="proxy-httpd" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.514402 4816 scope.go:117] "RemoveContainer" containerID="9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.515014 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.519199 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.519369 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.524047 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.559874 4816 scope.go:117] "RemoveContainer" containerID="6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.587161 4816 scope.go:117] "RemoveContainer" containerID="012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74" Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.587671 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74\": container with ID starting with 012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74 not found: ID does not exist" containerID="012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.587718 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74"} err="failed to get container status \"012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74\": rpc error: code = NotFound desc = could not find container \"012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74\": container with ID starting with 012d5900e1d6c96744219cfe49cdd20eda8e2d32c9f3a4c6b4e71a3bc5ddce74 not found: ID does not exist" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.587753 4816 scope.go:117] "RemoveContainer" containerID="b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742" Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.588178 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742\": container with ID starting with b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742 not found: ID does not exist" containerID="b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.588219 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742"} err="failed to get container status \"b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742\": rpc error: code = NotFound desc = could not find container \"b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742\": container with ID starting with b5ac7bb135f89b8205da0b035c182f00ff52bbc869cbd9f84949ef407de35742 not found: ID does not exist" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.588244 4816 scope.go:117] "RemoveContainer" containerID="9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40" Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.588503 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40\": container with ID starting with 9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40 not found: ID does not exist" containerID="9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.588534 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40"} err="failed to get container status \"9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40\": rpc error: code = NotFound desc = could not find container \"9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40\": container with ID starting with 9fbe270bbce98826625f9d5ddbae01d29e16735e507da210db0d04d9adf37f40 not found: ID does not exist" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.588555 4816 scope.go:117] "RemoveContainer" containerID="6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c" Feb 16 13:24:29 crc kubenswrapper[4816]: E0216 13:24:29.588920 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c\": container with ID starting with 6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c not found: ID does not exist" containerID="6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.588964 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c"} err="failed to get container status \"6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c\": rpc error: code = NotFound desc = could not find container \"6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c\": container with ID starting with 6c32924da47c9e4b5e6e91ebf87e06df09d830df2b4b37a7bab9f0f036189b0c not found: ID does not exist" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.609363 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-scripts\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.609430 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-log-httpd\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.609557 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-run-httpd\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.609797 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-config-data\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " 
pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.609894 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6xw9\" (UniqueName: \"kubernetes.io/projected/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-kube-api-access-g6xw9\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.609932 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.610056 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.712138 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-scripts\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.712247 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-log-httpd\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.712321 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-run-httpd\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.712455 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-config-data\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.712539 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6xw9\" (UniqueName: \"kubernetes.io/projected/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-kube-api-access-g6xw9\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.712586 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.712713 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-combined-ca-bundle\") 
pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.713055 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-log-httpd\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.713209 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-run-httpd\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.718626 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-config-data\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.719196 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.719896 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-scripts\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.721693 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.735878 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6xw9\" (UniqueName: \"kubernetes.io/projected/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-kube-api-access-g6xw9\") pod \"ceilometer-0\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") " pod="openstack/ceilometer-0" Feb 16 13:24:29 crc kubenswrapper[4816]: I0216 13:24:29.842735 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:24:30 crc kubenswrapper[4816]: I0216 13:24:30.336353 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:24:30 crc kubenswrapper[4816]: W0216 13:24:30.354042 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c7e2473_f87b_42be_a0e2_92ab00fdbd7d.slice/crio-efa64530580196be782ea4434e6a1b1263a196900be124467c0d9941a0d846c7 WatchSource:0}: Error finding container efa64530580196be782ea4434e6a1b1263a196900be124467c0d9941a0d846c7: Status 404 returned error can't find the container with id efa64530580196be782ea4434e6a1b1263a196900be124467c0d9941a0d846c7 Feb 16 13:24:30 crc kubenswrapper[4816]: I0216 13:24:30.449215 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerStarted","Data":"efa64530580196be782ea4434e6a1b1263a196900be124467c0d9941a0d846c7"} Feb 16 13:24:31 crc kubenswrapper[4816]: I0216 13:24:31.411114 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1004fce-1a7a-4783-88af-9147c41d5517" path="/var/lib/kubelet/pods/e1004fce-1a7a-4783-88af-9147c41d5517/volumes" Feb 16 13:24:31 crc kubenswrapper[4816]: I0216 13:24:31.459674 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerStarted","Data":"368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9"} Feb 16 13:24:31 crc kubenswrapper[4816]: I0216 13:24:31.900208 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 13:24:31 crc kubenswrapper[4816]: I0216 13:24:31.900481 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 13:24:31 crc kubenswrapper[4816]: I0216 13:24:31.930917 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 13:24:31 crc kubenswrapper[4816]: I0216 13:24:31.943637 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 13:24:32 crc kubenswrapper[4816]: I0216 13:24:32.478560 4816 generic.go:334] "Generic (PLEG): container finished" podID="c300f0d3-72e0-46f6-9910-9dda53a08e13" containerID="56e385b20d8b4b9b80ca388d17160eb68951aa4e48f004e0a892e63e727fe0b0" exitCode=0 Feb 16 13:24:32 crc kubenswrapper[4816]: I0216 13:24:32.478612 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-28k78" event={"ID":"c300f0d3-72e0-46f6-9910-9dda53a08e13","Type":"ContainerDied","Data":"56e385b20d8b4b9b80ca388d17160eb68951aa4e48f004e0a892e63e727fe0b0"} Feb 16 13:24:32 crc kubenswrapper[4816]: I0216 13:24:32.481237 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerStarted","Data":"abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429"} Feb 16 13:24:32 crc kubenswrapper[4816]: I0216 13:24:32.481281 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerStarted","Data":"14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec"} Feb 16 13:24:32 crc kubenswrapper[4816]: I0216 
13:24:32.481453 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 13:24:32 crc kubenswrapper[4816]: I0216 13:24:32.481542 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 13:24:33 crc kubenswrapper[4816]: I0216 13:24:33.891352 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.014330 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-combined-ca-bundle\") pod \"c300f0d3-72e0-46f6-9910-9dda53a08e13\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.014731 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-config-data\") pod \"c300f0d3-72e0-46f6-9910-9dda53a08e13\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.014854 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kq6j\" (UniqueName: \"kubernetes.io/projected/c300f0d3-72e0-46f6-9910-9dda53a08e13-kube-api-access-7kq6j\") pod \"c300f0d3-72e0-46f6-9910-9dda53a08e13\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.015110 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-scripts\") pod \"c300f0d3-72e0-46f6-9910-9dda53a08e13\" (UID: \"c300f0d3-72e0-46f6-9910-9dda53a08e13\") " Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.019562 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-scripts" (OuterVolumeSpecName: "scripts") pod "c300f0d3-72e0-46f6-9910-9dda53a08e13" (UID: "c300f0d3-72e0-46f6-9910-9dda53a08e13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.019868 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c300f0d3-72e0-46f6-9910-9dda53a08e13-kube-api-access-7kq6j" (OuterVolumeSpecName: "kube-api-access-7kq6j") pod "c300f0d3-72e0-46f6-9910-9dda53a08e13" (UID: "c300f0d3-72e0-46f6-9910-9dda53a08e13"). InnerVolumeSpecName "kube-api-access-7kq6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.047926 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.048160 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.052503 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-config-data" (OuterVolumeSpecName: "config-data") pod "c300f0d3-72e0-46f6-9910-9dda53a08e13" (UID: "c300f0d3-72e0-46f6-9910-9dda53a08e13"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.068228 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c300f0d3-72e0-46f6-9910-9dda53a08e13" (UID: "c300f0d3-72e0-46f6-9910-9dda53a08e13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.093169 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.101460 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.117360 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.117400 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.117412 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c300f0d3-72e0-46f6-9910-9dda53a08e13-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.117420 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kq6j\" (UniqueName: \"kubernetes.io/projected/c300f0d3-72e0-46f6-9910-9dda53a08e13-kube-api-access-7kq6j\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.469403 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.514595 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerStarted","Data":"b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703"} Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.514927 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.518793 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.520065 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-28k78" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.520391 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-28k78" event={"ID":"c300f0d3-72e0-46f6-9910-9dda53a08e13","Type":"ContainerDied","Data":"d1e819b93270b49a3321359b9bdb0b4156c56db6d422d60f2049e4fd4d9e7978"} Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.520441 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1e819b93270b49a3321359b9bdb0b4156c56db6d422d60f2049e4fd4d9e7978" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.522168 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.522213 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.566372 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.567948 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.197279953 podStartE2EDuration="5.567927358s" podCreationTimestamp="2026-02-16 13:24:29 +0000 UTC" firstStartedPulling="2026-02-16 13:24:30.357392286 +0000 UTC m=+1269.684106024" lastFinishedPulling="2026-02-16 13:24:33.728039701 +0000 UTC m=+1273.054753429" observedRunningTime="2026-02-16 13:24:34.553397062 +0000 UTC m=+1273.880110800" watchObservedRunningTime="2026-02-16 13:24:34.567927358 +0000 UTC m=+1273.894641106" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.713411 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 13:24:34 crc kubenswrapper[4816]: E0216 13:24:34.714047 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c300f0d3-72e0-46f6-9910-9dda53a08e13" containerName="nova-cell0-conductor-db-sync" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.714065 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c300f0d3-72e0-46f6-9910-9dda53a08e13" containerName="nova-cell0-conductor-db-sync" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.714249 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c300f0d3-72e0-46f6-9910-9dda53a08e13" containerName="nova-cell0-conductor-db-sync" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.714878 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.717673 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.717820 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mrtw9" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.730932 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.849046 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.849405 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnr64\" (UniqueName: \"kubernetes.io/projected/b266490b-4d0a-4463-8818-2bcdc39cdf88-kube-api-access-vnr64\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.849565 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.950911 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.951127 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.951173 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnr64\" (UniqueName: \"kubernetes.io/projected/b266490b-4d0a-4463-8818-2bcdc39cdf88-kube-api-access-vnr64\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.960851 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.961120 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:34 crc kubenswrapper[4816]: I0216 13:24:34.984281 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnr64\" (UniqueName: \"kubernetes.io/projected/b266490b-4d0a-4463-8818-2bcdc39cdf88-kube-api-access-vnr64\") pod \"nova-cell0-conductor-0\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:35 crc kubenswrapper[4816]: I0216 13:24:35.034691 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:35 crc kubenswrapper[4816]: I0216 13:24:35.543356 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 13:24:35 crc kubenswrapper[4816]: W0216 13:24:35.550830 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb266490b_4d0a_4463_8818_2bcdc39cdf88.slice/crio-ab94b4f69abff1a90ad8843f5b4ab047033b7c787703f3006373bef714faf201 WatchSource:0}: Error finding container ab94b4f69abff1a90ad8843f5b4ab047033b7c787703f3006373bef714faf201: Status 404 returned error can't find the container with id ab94b4f69abff1a90ad8843f5b4ab047033b7c787703f3006373bef714faf201 Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.551414 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.551757 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.551588 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b266490b-4d0a-4463-8818-2bcdc39cdf88","Type":"ContainerStarted","Data":"61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437"} Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.551816 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b266490b-4d0a-4463-8818-2bcdc39cdf88","Type":"ContainerStarted","Data":"ab94b4f69abff1a90ad8843f5b4ab047033b7c787703f3006373bef714faf201"} Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.552064 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.573884 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.573870351 podStartE2EDuration="2.573870351s" podCreationTimestamp="2026-02-16 13:24:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:36.569733169 +0000 UTC m=+1275.896446917" watchObservedRunningTime="2026-02-16 13:24:36.573870351 +0000 UTC m=+1275.900584079" Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.851541 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.856796 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.940599 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon 
Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.940599 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.940697 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.940744 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc"
Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.941525 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a16107fccce9c93e96a6d43d25ee1381b11a663b98df0e1296331b66fbfb375f"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 16 13:24:36 crc kubenswrapper[4816]: I0216 13:24:36.941604 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://a16107fccce9c93e96a6d43d25ee1381b11a663b98df0e1296331b66fbfb375f" gracePeriod=600
Feb 16 13:24:37 crc kubenswrapper[4816]: I0216 13:24:37.561582 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="a16107fccce9c93e96a6d43d25ee1381b11a663b98df0e1296331b66fbfb375f" exitCode=0
Feb 16 13:24:37 crc kubenswrapper[4816]: I0216 13:24:37.561639 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"a16107fccce9c93e96a6d43d25ee1381b11a663b98df0e1296331b66fbfb375f"}
Feb 16 13:24:37 crc kubenswrapper[4816]: I0216 13:24:37.561964 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"95aa8a6a6708ed2afc1dbaedfa775efb1b58e3f80a0695507163b402563b9cf2"}
Feb 16 13:24:37 crc kubenswrapper[4816]: I0216 13:24:37.561989 4816 scope.go:117] "RemoveContainer" containerID="64ab67741e223081f84c6d63a99c0d895038e507375b2c1f1a0cf120b6972be0"
Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.063164 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.756578 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-l7mtz"]
Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.758016 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l7mtz"
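This is the complete liveness-failure path for machine-config-daemon-f95nc: the prober's GET to http://127.0.0.1:8798/health is refused, the sync loop marks the container unhealthy, the runtime kills it with the pod's termination grace period (gracePeriod=600 here, which comes from the pod spec), the container exits 0 on a clean SIGTERM shutdown, and a replacement starts while the oldest dead instance is garbage-collected via RemoveContainer. A rough sketch of the HTTP check the prober performs; the URL is taken from the log, while the timeout and the thresholds in the comment are assumed kubelet defaults that this log does not confirm:

import urllib.request, urllib.error

def http_liveness_check(url="http://127.0.0.1:8798/health", timeout=1.0):
    """Mimic a kubelet HTTP liveness probe: any 2xx/3xx status is success."""
    try:
        with urllib.request.urlopen(url, timeout=timeout) as resp:
            return 200 <= resp.getcode() < 400
    except (urllib.error.URLError, OSError):
        # "connect: connection refused" in the log corresponds to this path.
        return False

# The kubelet restarts the container only after failureThreshold consecutive
# failures (default 3), probing every periodSeconds (default 10) -- assumed
# defaults; the real values live in the container's livenessProbe spec.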
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.760502 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.761228 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.765588 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-l7mtz"] Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.888540 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfjpf\" (UniqueName: \"kubernetes.io/projected/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-kube-api-access-nfjpf\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.888625 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-scripts\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.888707 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.888748 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-config-data\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.921723 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.933867 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.936316 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.936406 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.993415 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.993465 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.993499 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-config-data\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.993688 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.993787 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfjpf\" (UniqueName: \"kubernetes.io/projected/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-kube-api-access-nfjpf\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.993918 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-scripts\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:40 crc kubenswrapper[4816]: I0216 13:24:40.995897 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsjcz\" (UniqueName: \"kubernetes.io/projected/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-kube-api-access-wsjcz\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.000158 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-config-data\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:41 
crc kubenswrapper[4816]: I0216 13:24:41.000304 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.009303 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-scripts\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.020451 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfjpf\" (UniqueName: \"kubernetes.io/projected/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-kube-api-access-nfjpf\") pod \"nova-cell0-cell-mapping-l7mtz\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.078256 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.101270 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsjcz\" (UniqueName: \"kubernetes.io/projected/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-kube-api-access-wsjcz\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.101320 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.101387 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.112713 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.124578 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsjcz\" (UniqueName: \"kubernetes.io/projected/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-kube-api-access-wsjcz\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.126696 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") " 
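Entries like these repeat for every pod in this section, so it can help to reduce them to a per-pod startup timeline: SyncLoop ADD, the first "No sandbox" message, the first ContainerStarted PLEG event, and the readiness flip. A sketch under the same kubelet.log assumption; the klog timestamp pattern is matched with the 0216 (Feb 16) date code this particular log happens to use:

import re

MARKERS = [
    ("added", r'"SyncLoop ADD".*pods=\["([^"]+)"\]'),
    ("sandbox_needed", r'"No sandbox for pod can be found\. Need to start a new one" pod="([^"]+)"'),
    ("container_started", r'"SyncLoop \(PLEG\): event for pod" pod="([^"]+)".*ContainerStarted'),
    ("ready", r'"SyncLoop \(probe\)" probe="readiness" status="ready" pod="([^"]+)"'),
]
TS = re.compile(r'[IWE]0216 (\d{2}:\d{2}:\d{2}\.\d+)')  # klog level + Feb 16 date code

def startup_phases(path="kubelet.log"):
    """Map each pod to the first timestamp at which each phase was seen."""
    phases = {}
    with open(path) as f:
        for line in f:
            ts = TS.search(line)
            if not ts:
                continue
            for name, pattern in MARKERS:
                m = re.search(pattern, line)
                if m:
                    phases.setdefault(m.group(1), {}).setdefault(name, ts.group(1))
    return phases

For openstack/nova-cell0-cell-mapping-l7mtz, for example, this would yield added at 13:24:40.756578 and sandbox_needed at 13:24:40.758016, with the later phases filled in as they appear below.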
pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.156720 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.158265 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.166418 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.209708 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.254895 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.256739 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.263079 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.264022 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.304698 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfc8fa9e-2344-42f9-a6e1-47e141343459-logs\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.304760 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbdn7\" (UniqueName: \"kubernetes.io/projected/dfc8fa9e-2344-42f9-a6e1-47e141343459-kube-api-access-bbdn7\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.304790 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.304903 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-config-data\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.323745 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.349231 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.350436 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.353565 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.359791 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-khvwc"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.361559 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.379966 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.390487 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-khvwc"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407404 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-config-data\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407541 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-config-data\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407581 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-config-data\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407609 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407631 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbfdm\" (UniqueName: \"kubernetes.io/projected/a4cdd57a-f04a-476a-91a5-144d323b3c29-kube-api-access-dbfdm\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407683 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e37be25-de5b-43dd-b779-6fad866f07f3-logs\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407713 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 
13:24:41.407730 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfc8fa9e-2344-42f9-a6e1-47e141343459-logs\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407753 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttdsh\" (UniqueName: \"kubernetes.io/projected/3e37be25-de5b-43dd-b779-6fad866f07f3-kube-api-access-ttdsh\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407772 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbdn7\" (UniqueName: \"kubernetes.io/projected/dfc8fa9e-2344-42f9-a6e1-47e141343459-kube-api-access-bbdn7\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.407792 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.411442 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfc8fa9e-2344-42f9-a6e1-47e141343459-logs\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.423497 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-config-data\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.433583 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.472014 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbdn7\" (UniqueName: \"kubernetes.io/projected/dfc8fa9e-2344-42f9-a6e1-47e141343459-kube-api-access-bbdn7\") pod \"nova-metadata-0\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.510926 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-svc\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511007 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: 
\"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511111 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvqdt\" (UniqueName: \"kubernetes.io/projected/d5392b80-e3a0-4695-a019-6eb5f23ca01c-kube-api-access-gvqdt\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511181 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-config-data\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511212 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-config-data\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511240 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511279 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-config\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511346 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511382 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbfdm\" (UniqueName: \"kubernetes.io/projected/a4cdd57a-f04a-476a-91a5-144d323b3c29-kube-api-access-dbfdm\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511439 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e37be25-de5b-43dd-b779-6fad866f07f3-logs\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511475 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 
13:24:41.511514 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.511551 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttdsh\" (UniqueName: \"kubernetes.io/projected/3e37be25-de5b-43dd-b779-6fad866f07f3-kube-api-access-ttdsh\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.528347 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-config-data\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.534139 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e37be25-de5b-43dd-b779-6fad866f07f3-logs\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.536475 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-config-data\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.547888 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.548696 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.550001 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttdsh\" (UniqueName: \"kubernetes.io/projected/3e37be25-de5b-43dd-b779-6fad866f07f3-kube-api-access-ttdsh\") pod \"nova-api-0\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.563080 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbfdm\" (UniqueName: \"kubernetes.io/projected/a4cdd57a-f04a-476a-91a5-144d323b3c29-kube-api-access-dbfdm\") pod \"nova-scheduler-0\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.593631 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.616230 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.616568 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-svc\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.616605 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.616671 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvqdt\" (UniqueName: \"kubernetes.io/projected/d5392b80-e3a0-4695-a019-6eb5f23ca01c-kube-api-access-gvqdt\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.616731 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.616765 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-config\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.617444 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.617752 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-svc\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.618056 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-config\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 
13:24:41.618297 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.618937 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.619374 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.635575 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvqdt\" (UniqueName: \"kubernetes.io/projected/d5392b80-e3a0-4695-a019-6eb5f23ca01c-kube-api-access-gvqdt\") pod \"dnsmasq-dns-865f5d856f-khvwc\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.680151 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.706987 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.861462 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-l7mtz"] Feb 16 13:24:41 crc kubenswrapper[4816]: I0216 13:24:41.984475 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.130892 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46pcb"] Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.132380 4816 util.go:30] "No sandbox for pod can be found. 
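The UniqueName field in the mount entries above encodes the volume plugin as well as the owning pod UID, which is how the secret-backed volumes (config-data, combined-ca-bundle, scripts), the projected service-account tokens (kube-api-access-*), the empty-dir volumes (logs), and the dnsmasq pod's configmap volumes (config, dns-svc, dns-swift-storage-0, ovsdbserver-nb/sb) can be told apart. A small tally along those lines, under the same kubelet.log assumption:

import re
from collections import Counter, defaultdict

# UniqueName looks like kubernetes.io/<plugin>/<uid>-<volume>, e.g.
# kubernetes.io/configmap/d5392b80-...-dns-svc in the dnsmasq entries above.
UNIQUE = re.compile(r'UniqueName: \\"kubernetes\.io/([a-z-]+)/')
POD = re.compile(r'pod="([^"]+)"')

def plugin_mix(path="kubelet.log"):
    """Count volume plugin kinds per pod, counting each volume at mount success."""
    mix = defaultdict(Counter)
    with open(path) as f:
        for line in f:
            if "MountVolume.SetUp succeeded" not in line:
                continue
            u, p = UNIQUE.search(line), POD.search(line)
            if u and p:
                mix[p.group(1)][u.group(1)] += 1
    return mix

For this window that would show, for instance, dnsmasq-dns-865f5d856f-khvwc with five configmap mounts and one projected mount, against the all-secret-plus-projected mix of the nova pods.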
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.141275 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46pcb"] Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.151231 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.151901 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.228987 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-config-data\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.229144 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.229257 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxfs7\" (UniqueName: \"kubernetes.io/projected/25fd1ebc-4f40-4a30-8685-05b050cca498-kube-api-access-nxfs7\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.229340 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-scripts\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.331445 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-config-data\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.331812 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.331849 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxfs7\" (UniqueName: \"kubernetes.io/projected/25fd1ebc-4f40-4a30-8685-05b050cca498-kube-api-access-nxfs7\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.331872 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-scripts\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.339458 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-scripts\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.342181 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-config-data\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.343183 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.385454 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxfs7\" (UniqueName: \"kubernetes.io/projected/25fd1ebc-4f40-4a30-8685-05b050cca498-kube-api-access-nxfs7\") pod \"nova-cell1-conductor-db-sync-46pcb\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.400638 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.462545 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.464982 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:42 crc kubenswrapper[4816]: W0216 13:24:42.465206 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4cdd57a_f04a_476a_91a5_144d323b3c29.slice/crio-5ffb43fcb7ce9457bf59c6f13de8c1e90231ae0cac0afcba0c251f7dff37342b WatchSource:0}: Error finding container 5ffb43fcb7ce9457bf59c6f13de8c1e90231ae0cac0afcba0c251f7dff37342b: Status 404 returned error can't find the container with id 5ffb43fcb7ce9457bf59c6f13de8c1e90231ae0cac0afcba0c251f7dff37342b Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.484322 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.591254 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-khvwc"] Feb 16 13:24:42 crc kubenswrapper[4816]: W0216 13:24:42.608939 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5392b80_e3a0_4695_a019_6eb5f23ca01c.slice/crio-b3770c757a546e04ae303134d3bb9c2f8aa0dcb54f2c5cebbb11e49106f3b69d WatchSource:0}: Error finding container b3770c757a546e04ae303134d3bb9c2f8aa0dcb54f2c5cebbb11e49106f3b69d: Status 404 returned error can't find the container with id b3770c757a546e04ae303134d3bb9c2f8aa0dcb54f2c5cebbb11e49106f3b69d Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.640081 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8a176030-0545-4a2b-b9c8-cce6a69dfdbe","Type":"ContainerStarted","Data":"30d9610102c6f411831f23e50d4397f71defe39fc5789736d55487f2818e1509"} Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.642393 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" event={"ID":"d5392b80-e3a0-4695-a019-6eb5f23ca01c","Type":"ContainerStarted","Data":"b3770c757a546e04ae303134d3bb9c2f8aa0dcb54f2c5cebbb11e49106f3b69d"} Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.644221 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a4cdd57a-f04a-476a-91a5-144d323b3c29","Type":"ContainerStarted","Data":"5ffb43fcb7ce9457bf59c6f13de8c1e90231ae0cac0afcba0c251f7dff37342b"} Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.646560 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dfc8fa9e-2344-42f9-a6e1-47e141343459","Type":"ContainerStarted","Data":"2f46bea56727f9fc19a5d4a25c5c82a1b4070fe5b36145d3eaf3ca25f2de8158"} Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.652175 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e37be25-de5b-43dd-b779-6fad866f07f3","Type":"ContainerStarted","Data":"9a11088911298a7596f4bd7ac8e5eb0ed517775d6cbb271e7800c62b608731b4"} Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.657044 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l7mtz" event={"ID":"2e221f94-9c2b-4f98-bc3b-3342bc071e6c","Type":"ContainerStarted","Data":"ffcda6952756ad6d6adbadba9e616f946581ed750ee58705820ad16489371010"} Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.657098 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l7mtz" 
event={"ID":"2e221f94-9c2b-4f98-bc3b-3342bc071e6c","Type":"ContainerStarted","Data":"6686dbc3d6057e094e17a6041d38bd9e3a7b255468a48f56cb721dcd96247c56"} Feb 16 13:24:42 crc kubenswrapper[4816]: I0216 13:24:42.678550 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-l7mtz" podStartSLOduration=2.678522503 podStartE2EDuration="2.678522503s" podCreationTimestamp="2026-02-16 13:24:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:42.6725417 +0000 UTC m=+1281.999255428" watchObservedRunningTime="2026-02-16 13:24:42.678522503 +0000 UTC m=+1282.005236231" Feb 16 13:24:43 crc kubenswrapper[4816]: I0216 13:24:43.000799 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46pcb"] Feb 16 13:24:43 crc kubenswrapper[4816]: W0216 13:24:43.014608 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25fd1ebc_4f40_4a30_8685_05b050cca498.slice/crio-55ebdd3284774287683e61371e31216ab34887eae9272513877a13403df74282 WatchSource:0}: Error finding container 55ebdd3284774287683e61371e31216ab34887eae9272513877a13403df74282: Status 404 returned error can't find the container with id 55ebdd3284774287683e61371e31216ab34887eae9272513877a13403df74282 Feb 16 13:24:43 crc kubenswrapper[4816]: I0216 13:24:43.698847 4816 generic.go:334] "Generic (PLEG): container finished" podID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerID="8d9d8628d67d6b4cddd6e243d4b090aa9289ab58d9a0db8e8d50f4d420889b21" exitCode=0 Feb 16 13:24:43 crc kubenswrapper[4816]: I0216 13:24:43.699194 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" event={"ID":"d5392b80-e3a0-4695-a019-6eb5f23ca01c","Type":"ContainerDied","Data":"8d9d8628d67d6b4cddd6e243d4b090aa9289ab58d9a0db8e8d50f4d420889b21"} Feb 16 13:24:43 crc kubenswrapper[4816]: I0216 13:24:43.724749 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46pcb" event={"ID":"25fd1ebc-4f40-4a30-8685-05b050cca498","Type":"ContainerStarted","Data":"55ebdd3284774287683e61371e31216ab34887eae9272513877a13403df74282"} Feb 16 13:24:44 crc kubenswrapper[4816]: I0216 13:24:44.741220 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46pcb" event={"ID":"25fd1ebc-4f40-4a30-8685-05b050cca498","Type":"ContainerStarted","Data":"de70394506e788b0a9d01206ee022d4664882aace80a609a88569836fca38d8d"} Feb 16 13:24:44 crc kubenswrapper[4816]: I0216 13:24:44.763854 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-46pcb" podStartSLOduration=2.763829984 podStartE2EDuration="2.763829984s" podCreationTimestamp="2026-02-16 13:24:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:44.756140483 +0000 UTC m=+1284.082854211" watchObservedRunningTime="2026-02-16 13:24:44.763829984 +0000 UTC m=+1284.090543712" Feb 16 13:24:45 crc kubenswrapper[4816]: I0216 13:24:45.345561 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:45 crc kubenswrapper[4816]: I0216 13:24:45.372980 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:24:46 crc 
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.765383 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" event={"ID":"d5392b80-e3a0-4695-a019-6eb5f23ca01c","Type":"ContainerStarted","Data":"5c0de90e8dcfab5d056886954e128878aef7cca94e4c5037f744e9127ed2ae39"}
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.765850 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-khvwc"
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.768645 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a4cdd57a-f04a-476a-91a5-144d323b3c29","Type":"ContainerStarted","Data":"9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549"}
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.771044 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dfc8fa9e-2344-42f9-a6e1-47e141343459","Type":"ContainerStarted","Data":"d9fc6c343abd74ee8d6a07cccee8b0dfa8b3d649e3953364120a36b4b19f99b6"}
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.771110 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dfc8fa9e-2344-42f9-a6e1-47e141343459","Type":"ContainerStarted","Data":"76ec843c703c6b17fc54715a57c64cbeba5f4bb5c4680e10148f367c247fc0b6"}
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.771122 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-log" containerID="cri-o://76ec843c703c6b17fc54715a57c64cbeba5f4bb5c4680e10148f367c247fc0b6" gracePeriod=30
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.771169 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-metadata" containerID="cri-o://d9fc6c343abd74ee8d6a07cccee8b0dfa8b3d649e3953364120a36b4b19f99b6" gracePeriod=30
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.773508 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e37be25-de5b-43dd-b779-6fad866f07f3","Type":"ContainerStarted","Data":"9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076"}
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.773559 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e37be25-de5b-43dd-b779-6fad866f07f3","Type":"ContainerStarted","Data":"ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02"}
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.778419 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8a176030-0545-4a2b-b9c8-cce6a69dfdbe","Type":"ContainerStarted","Data":"3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e"}
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.778644 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="8a176030-0545-4a2b-b9c8-cce6a69dfdbe" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e" gracePeriod=30
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.822554 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" podStartSLOduration=5.822530858 podStartE2EDuration="5.822530858s" podCreationTimestamp="2026-02-16 13:24:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:46.816886734 +0000 UTC m=+1286.143600462" watchObservedRunningTime="2026-02-16 13:24:46.822530858 +0000 UTC m=+1286.149244586"
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.885118 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.832759315 podStartE2EDuration="5.885093096s" podCreationTimestamp="2026-02-16 13:24:41 +0000 UTC" firstStartedPulling="2026-02-16 13:24:42.537002578 +0000 UTC m=+1281.863716296" lastFinishedPulling="2026-02-16 13:24:45.589336349 +0000 UTC m=+1284.916050077" observedRunningTime="2026-02-16 13:24:46.870344644 +0000 UTC m=+1286.197058372" watchObservedRunningTime="2026-02-16 13:24:46.885093096 +0000 UTC m=+1286.211806844"
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.934642 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.416272422 podStartE2EDuration="6.934618579s" podCreationTimestamp="2026-02-16 13:24:40 +0000 UTC" firstStartedPulling="2026-02-16 13:24:42.042177034 +0000 UTC m=+1281.368890772" lastFinishedPulling="2026-02-16 13:24:45.560523201 +0000 UTC m=+1284.887236929" observedRunningTime="2026-02-16 13:24:46.909419831 +0000 UTC m=+1286.236133569" watchObservedRunningTime="2026-02-16 13:24:46.934618579 +0000 UTC m=+1286.261332297"
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.972073 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.84686182 podStartE2EDuration="5.972054401s" podCreationTimestamp="2026-02-16 13:24:41 +0000 UTC" firstStartedPulling="2026-02-16 13:24:42.478370636 +0000 UTC m=+1281.805084364" lastFinishedPulling="2026-02-16 13:24:45.603563217 +0000 UTC m=+1284.930276945" observedRunningTime="2026-02-16 13:24:46.965914344 +0000 UTC m=+1286.292628072" watchObservedRunningTime="2026-02-16 13:24:46.972054401 +0000 UTC m=+1286.298768129"
Feb 16 13:24:46 crc kubenswrapper[4816]: I0216 13:24:46.995074 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.810561959 podStartE2EDuration="5.99505597s" podCreationTimestamp="2026-02-16 13:24:41 +0000 UTC" firstStartedPulling="2026-02-16 13:24:42.405051814 +0000 UTC m=+1281.731765542" lastFinishedPulling="2026-02-16 13:24:45.589545825 +0000 UTC m=+1284.916259553" observedRunningTime="2026-02-16 13:24:46.994785032 +0000 UTC m=+1286.321498760" watchObservedRunningTime="2026-02-16 13:24:46.99505597 +0000 UTC m=+1286.321769698"
Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.790975 4816 generic.go:334] "Generic (PLEG): container finished" podID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerID="d9fc6c343abd74ee8d6a07cccee8b0dfa8b3d649e3953364120a36b4b19f99b6" exitCode=0
Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.791216 4816 generic.go:334] "Generic (PLEG): container finished" podID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerID="76ec843c703c6b17fc54715a57c64cbeba5f4bb5c4680e10148f367c247fc0b6" exitCode=143
Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.791944 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dfc8fa9e-2344-42f9-a6e1-47e141343459","Type":"ContainerDied","Data":"d9fc6c343abd74ee8d6a07cccee8b0dfa8b3d649e3953364120a36b4b19f99b6"}
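Two shapes of startup-latency entry appear above: pods with the 0001-01-01 sentinel pull timestamps (image already on the node, so podStartSLOduration equals podStartE2EDuration, as for dnsmasq), and pods with a real pull window, where the SLO duration excludes pull time. For nova-api-0, 13:24:42.537 to 13:24:45.589 is about 3.05 s of pulling, and 5.885 s end-to-end minus 3.05 s matches the reported podStartSLOduration=2.83. The monotonic m=+ suffixes make that subtraction easy without parsing the nanosecond wall-clock stamps; a sketch, same kubelet.log assumption:

import re

# Pull timestamps carry a monotonic-clock suffix (m=+<seconds>) when a real
# pull happened; the 0001-01-01 sentinel (no pull needed) carries none.
MONO = re.compile(r'(firstStartedPulling|lastFinishedPulling)="[^"]*m=\+([\d.]+)"')

def pull_seconds(latency_line):
    """Image pull time, in seconds, from one pod_startup_latency_tracker line."""
    marks = dict(MONO.findall(latency_line))
    if len(marks) == 2:
        return float(marks["lastFinishedPulling"]) - float(marks["firstStartedPulling"])
    return 0.0  # sentinel timestamps: the image was already present

Applied to the nova-api-0 line above, this returns 1284.916050077 - 1281.863716296, roughly 3.05 seconds. The adjacent exit codes are also worth reading in the usual Unix way: 0 is a clean SIGTERM shutdown, while 143 is 128 + 15, a process killed by SIGTERM before handling it.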
event={"ID":"dfc8fa9e-2344-42f9-a6e1-47e141343459","Type":"ContainerDied","Data":"d9fc6c343abd74ee8d6a07cccee8b0dfa8b3d649e3953364120a36b4b19f99b6"} Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.791967 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dfc8fa9e-2344-42f9-a6e1-47e141343459","Type":"ContainerDied","Data":"76ec843c703c6b17fc54715a57c64cbeba5f4bb5c4680e10148f367c247fc0b6"} Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.791978 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dfc8fa9e-2344-42f9-a6e1-47e141343459","Type":"ContainerDied","Data":"2f46bea56727f9fc19a5d4a25c5c82a1b4070fe5b36145d3eaf3ca25f2de8158"} Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.791989 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f46bea56727f9fc19a5d4a25c5c82a1b4070fe5b36145d3eaf3ca25f2de8158" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.839448 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.887266 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfc8fa9e-2344-42f9-a6e1-47e141343459-logs\") pod \"dfc8fa9e-2344-42f9-a6e1-47e141343459\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.887417 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-config-data\") pod \"dfc8fa9e-2344-42f9-a6e1-47e141343459\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.887497 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbdn7\" (UniqueName: \"kubernetes.io/projected/dfc8fa9e-2344-42f9-a6e1-47e141343459-kube-api-access-bbdn7\") pod \"dfc8fa9e-2344-42f9-a6e1-47e141343459\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.887516 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-combined-ca-bundle\") pod \"dfc8fa9e-2344-42f9-a6e1-47e141343459\" (UID: \"dfc8fa9e-2344-42f9-a6e1-47e141343459\") " Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.888643 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfc8fa9e-2344-42f9-a6e1-47e141343459-logs" (OuterVolumeSpecName: "logs") pod "dfc8fa9e-2344-42f9-a6e1-47e141343459" (UID: "dfc8fa9e-2344-42f9-a6e1-47e141343459"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.898438 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfc8fa9e-2344-42f9-a6e1-47e141343459-kube-api-access-bbdn7" (OuterVolumeSpecName: "kube-api-access-bbdn7") pod "dfc8fa9e-2344-42f9-a6e1-47e141343459" (UID: "dfc8fa9e-2344-42f9-a6e1-47e141343459"). InnerVolumeSpecName "kube-api-access-bbdn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.917614 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfc8fa9e-2344-42f9-a6e1-47e141343459" (UID: "dfc8fa9e-2344-42f9-a6e1-47e141343459"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.925350 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-config-data" (OuterVolumeSpecName: "config-data") pod "dfc8fa9e-2344-42f9-a6e1-47e141343459" (UID: "dfc8fa9e-2344-42f9-a6e1-47e141343459"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.989711 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.989743 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbdn7\" (UniqueName: \"kubernetes.io/projected/dfc8fa9e-2344-42f9-a6e1-47e141343459-kube-api-access-bbdn7\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.989754 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc8fa9e-2344-42f9-a6e1-47e141343459-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:47 crc kubenswrapper[4816]: I0216 13:24:47.989763 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfc8fa9e-2344-42f9-a6e1-47e141343459-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.802125 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.849443 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.867465 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.886495 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:48 crc kubenswrapper[4816]: E0216 13:24:48.887374 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-metadata" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.887465 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-metadata" Feb 16 13:24:48 crc kubenswrapper[4816]: E0216 13:24:48.887576 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-log" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.887682 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-log" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.887971 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-metadata" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.888056 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" containerName="nova-metadata-log" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.889441 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.895191 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.895226 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 16 13:24:48 crc kubenswrapper[4816]: I0216 13:24:48.905286 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.007798 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.007857 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-config-data\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.007890 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.007973 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgzhc\" (UniqueName: \"kubernetes.io/projected/97014da5-d64b-4e4a-9b79-fb186d064cd8-kube-api-access-pgzhc\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.008008 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97014da5-d64b-4e4a-9b79-fb186d064cd8-logs\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.109788 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.109837 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-config-data\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.109865 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " 
pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.109904 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgzhc\" (UniqueName: \"kubernetes.io/projected/97014da5-d64b-4e4a-9b79-fb186d064cd8-kube-api-access-pgzhc\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.109930 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97014da5-d64b-4e4a-9b79-fb186d064cd8-logs\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.110707 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97014da5-d64b-4e4a-9b79-fb186d064cd8-logs\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.115421 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.120257 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.130029 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-config-data\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.130251 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgzhc\" (UniqueName: \"kubernetes.io/projected/97014da5-d64b-4e4a-9b79-fb186d064cd8-kube-api-access-pgzhc\") pod \"nova-metadata-0\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.212009 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.414780 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfc8fa9e-2344-42f9-a6e1-47e141343459" path="/var/lib/kubelet/pods/dfc8fa9e-2344-42f9-a6e1-47e141343459/volumes" Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.693285 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:49 crc kubenswrapper[4816]: W0216 13:24:49.694030 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97014da5_d64b_4e4a_9b79_fb186d064cd8.slice/crio-26df1d12c13f4183eaf034a4ea07088d601bef48ee43fbb6aceaa6ab5e74f9bf WatchSource:0}: Error finding container 26df1d12c13f4183eaf034a4ea07088d601bef48ee43fbb6aceaa6ab5e74f9bf: Status 404 returned error can't find the container with id 26df1d12c13f4183eaf034a4ea07088d601bef48ee43fbb6aceaa6ab5e74f9bf Feb 16 13:24:49 crc kubenswrapper[4816]: I0216 13:24:49.828912 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97014da5-d64b-4e4a-9b79-fb186d064cd8","Type":"ContainerStarted","Data":"26df1d12c13f4183eaf034a4ea07088d601bef48ee43fbb6aceaa6ab5e74f9bf"} Feb 16 13:24:50 crc kubenswrapper[4816]: I0216 13:24:50.840873 4816 generic.go:334] "Generic (PLEG): container finished" podID="2e221f94-9c2b-4f98-bc3b-3342bc071e6c" containerID="ffcda6952756ad6d6adbadba9e616f946581ed750ee58705820ad16489371010" exitCode=0 Feb 16 13:24:50 crc kubenswrapper[4816]: I0216 13:24:50.840901 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l7mtz" event={"ID":"2e221f94-9c2b-4f98-bc3b-3342bc071e6c","Type":"ContainerDied","Data":"ffcda6952756ad6d6adbadba9e616f946581ed750ee58705820ad16489371010"} Feb 16 13:24:50 crc kubenswrapper[4816]: I0216 13:24:50.843574 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97014da5-d64b-4e4a-9b79-fb186d064cd8","Type":"ContainerStarted","Data":"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897"} Feb 16 13:24:50 crc kubenswrapper[4816]: I0216 13:24:50.843618 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97014da5-d64b-4e4a-9b79-fb186d064cd8","Type":"ContainerStarted","Data":"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0"} Feb 16 13:24:50 crc kubenswrapper[4816]: I0216 13:24:50.881054 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.881030978 podStartE2EDuration="2.881030978s" podCreationTimestamp="2026-02-16 13:24:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:50.872094673 +0000 UTC m=+1290.198808441" watchObservedRunningTime="2026-02-16 13:24:50.881030978 +0000 UTC m=+1290.207744726" Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.269273 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.621018 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.621268 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 13:24:51 crc 
kubenswrapper[4816]: I0216 13:24:51.681158 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.681240 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.709100 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.709274 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.820412 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-b24bk"] Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.820719 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" podUID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerName="dnsmasq-dns" containerID="cri-o://310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52" gracePeriod=10 Feb 16 13:24:51 crc kubenswrapper[4816]: I0216 13:24:51.896519 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.365146 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.399707 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfjpf\" (UniqueName: \"kubernetes.io/projected/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-kube-api-access-nfjpf\") pod \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.400101 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-scripts\") pod \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.400171 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-config-data\") pod \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.437901 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-kube-api-access-nfjpf" (OuterVolumeSpecName: "kube-api-access-nfjpf") pod "2e221f94-9c2b-4f98-bc3b-3342bc071e6c" (UID: "2e221f94-9c2b-4f98-bc3b-3342bc071e6c"). InnerVolumeSpecName "kube-api-access-nfjpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.438526 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-scripts" (OuterVolumeSpecName: "scripts") pod "2e221f94-9c2b-4f98-bc3b-3342bc071e6c" (UID: "2e221f94-9c2b-4f98-bc3b-3342bc071e6c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.495590 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-config-data" (OuterVolumeSpecName: "config-data") pod "2e221f94-9c2b-4f98-bc3b-3342bc071e6c" (UID: "2e221f94-9c2b-4f98-bc3b-3342bc071e6c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.502420 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-combined-ca-bundle\") pod \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\" (UID: \"2e221f94-9c2b-4f98-bc3b-3342bc071e6c\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.504185 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfjpf\" (UniqueName: \"kubernetes.io/projected/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-kube-api-access-nfjpf\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.504229 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.504242 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.553036 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.577074 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e221f94-9c2b-4f98-bc3b-3342bc071e6c" (UID: "2e221f94-9c2b-4f98-bc3b-3342bc071e6c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.608075 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-swift-storage-0\") pod \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.608255 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-svc\") pod \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.608298 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-nb\") pod \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.608346 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxmtq\" (UniqueName: \"kubernetes.io/projected/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-kube-api-access-jxmtq\") pod \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.608367 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-config\") pod \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.608384 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-sb\") pod \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\" (UID: \"e1c78c8f-f023-4a68-b9ce-52b09090a1e2\") " Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.608725 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e221f94-9c2b-4f98-bc3b-3342bc071e6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.621016 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.621335 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.665979 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e1c78c8f-f023-4a68-b9ce-52b09090a1e2" (UID: "e1c78c8f-f023-4a68-b9ce-52b09090a1e2"). 
InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.695135 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e1c78c8f-f023-4a68-b9ce-52b09090a1e2" (UID: "e1c78c8f-f023-4a68-b9ce-52b09090a1e2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.708831 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e1c78c8f-f023-4a68-b9ce-52b09090a1e2" (UID: "e1c78c8f-f023-4a68-b9ce-52b09090a1e2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.710539 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.710562 4816 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.710573 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.709210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-config" (OuterVolumeSpecName: "config") pod "e1c78c8f-f023-4a68-b9ce-52b09090a1e2" (UID: "e1c78c8f-f023-4a68-b9ce-52b09090a1e2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.718111 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e1c78c8f-f023-4a68-b9ce-52b09090a1e2" (UID: "e1c78c8f-f023-4a68-b9ce-52b09090a1e2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.718833 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-kube-api-access-jxmtq" (OuterVolumeSpecName: "kube-api-access-jxmtq") pod "e1c78c8f-f023-4a68-b9ce-52b09090a1e2" (UID: "e1c78c8f-f023-4a68-b9ce-52b09090a1e2"). InnerVolumeSpecName "kube-api-access-jxmtq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.818153 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.818191 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxmtq\" (UniqueName: \"kubernetes.io/projected/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-kube-api-access-jxmtq\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.818201 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1c78c8f-f023-4a68-b9ce-52b09090a1e2-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.864640 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l7mtz" event={"ID":"2e221f94-9c2b-4f98-bc3b-3342bc071e6c","Type":"ContainerDied","Data":"6686dbc3d6057e094e17a6041d38bd9e3a7b255468a48f56cb721dcd96247c56"} Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.864680 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l7mtz" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.864694 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6686dbc3d6057e094e17a6041d38bd9e3a7b255468a48f56cb721dcd96247c56" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.867788 4816 generic.go:334] "Generic (PLEG): container finished" podID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerID="310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52" exitCode=0 Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.867828 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" event={"ID":"e1c78c8f-f023-4a68-b9ce-52b09090a1e2","Type":"ContainerDied","Data":"310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52"} Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.867851 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.867889 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-b24bk" event={"ID":"e1c78c8f-f023-4a68-b9ce-52b09090a1e2","Type":"ContainerDied","Data":"11612a57ba2235e3d89c6c8f3507138f19691239dfcff7fda51eccf749173941"} Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.867947 4816 scope.go:117] "RemoveContainer" containerID="310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.870486 4816 generic.go:334] "Generic (PLEG): container finished" podID="25fd1ebc-4f40-4a30-8685-05b050cca498" containerID="de70394506e788b0a9d01206ee022d4664882aace80a609a88569836fca38d8d" exitCode=0 Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.871107 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46pcb" event={"ID":"25fd1ebc-4f40-4a30-8685-05b050cca498","Type":"ContainerDied","Data":"de70394506e788b0a9d01206ee022d4664882aace80a609a88569836fca38d8d"} Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.908349 4816 scope.go:117] "RemoveContainer" containerID="c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.953401 4816 scope.go:117] "RemoveContainer" containerID="310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52" Feb 16 13:24:52 crc kubenswrapper[4816]: E0216 13:24:52.953872 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52\": container with ID starting with 310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52 not found: ID does not exist" containerID="310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.953904 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52"} err="failed to get container status \"310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52\": rpc error: code = NotFound desc = could not find container \"310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52\": container with ID starting with 310e3bff7f8f4d1525fc60f9d5577ba7b23da20197ce4a578703e6dd33c65f52 not found: ID does not exist" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.953923 4816 scope.go:117] "RemoveContainer" containerID="c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2" Feb 16 13:24:52 crc kubenswrapper[4816]: E0216 13:24:52.954228 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2\": container with ID starting with c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2 not found: ID does not exist" containerID="c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.954249 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2"} err="failed to get container status \"c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2\": rpc error: code = 
NotFound desc = could not find container \"c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2\": container with ID starting with c0bf9e3c64a1bbbfa83acde9444ec8e17b4a892de8569b40b384acceebf380a2 not found: ID does not exist" Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.955056 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-b24bk"] Feb 16 13:24:52 crc kubenswrapper[4816]: I0216 13:24:52.971817 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-b24bk"] Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.043731 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.072956 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.073194 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-log" containerID="cri-o://ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02" gracePeriod=30 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.073335 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-api" containerID="cri-o://9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076" gracePeriod=30 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.099011 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.099255 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-log" containerID="cri-o://7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0" gracePeriod=30 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.099403 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-metadata" containerID="cri-o://236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897" gracePeriod=30 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.413698 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" path="/var/lib/kubelet/pods/e1c78c8f-f023-4a68-b9ce-52b09090a1e2/volumes" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.726022 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.839382 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-config-data\") pod \"97014da5-d64b-4e4a-9b79-fb186d064cd8\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.839460 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97014da5-d64b-4e4a-9b79-fb186d064cd8-logs\") pod \"97014da5-d64b-4e4a-9b79-fb186d064cd8\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.839597 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-combined-ca-bundle\") pod \"97014da5-d64b-4e4a-9b79-fb186d064cd8\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.839703 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgzhc\" (UniqueName: \"kubernetes.io/projected/97014da5-d64b-4e4a-9b79-fb186d064cd8-kube-api-access-pgzhc\") pod \"97014da5-d64b-4e4a-9b79-fb186d064cd8\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.839726 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-nova-metadata-tls-certs\") pod \"97014da5-d64b-4e4a-9b79-fb186d064cd8\" (UID: \"97014da5-d64b-4e4a-9b79-fb186d064cd8\") " Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.840210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97014da5-d64b-4e4a-9b79-fb186d064cd8-logs" (OuterVolumeSpecName: "logs") pod "97014da5-d64b-4e4a-9b79-fb186d064cd8" (UID: "97014da5-d64b-4e4a-9b79-fb186d064cd8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.846848 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97014da5-d64b-4e4a-9b79-fb186d064cd8-kube-api-access-pgzhc" (OuterVolumeSpecName: "kube-api-access-pgzhc") pod "97014da5-d64b-4e4a-9b79-fb186d064cd8" (UID: "97014da5-d64b-4e4a-9b79-fb186d064cd8"). InnerVolumeSpecName "kube-api-access-pgzhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.870164 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-config-data" (OuterVolumeSpecName: "config-data") pod "97014da5-d64b-4e4a-9b79-fb186d064cd8" (UID: "97014da5-d64b-4e4a-9b79-fb186d064cd8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.880470 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97014da5-d64b-4e4a-9b79-fb186d064cd8" (UID: "97014da5-d64b-4e4a-9b79-fb186d064cd8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.883061 4816 generic.go:334] "Generic (PLEG): container finished" podID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerID="236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897" exitCode=0 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.883094 4816 generic.go:334] "Generic (PLEG): container finished" podID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerID="7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0" exitCode=143 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.883120 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.883152 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97014da5-d64b-4e4a-9b79-fb186d064cd8","Type":"ContainerDied","Data":"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897"} Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.883185 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97014da5-d64b-4e4a-9b79-fb186d064cd8","Type":"ContainerDied","Data":"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0"} Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.883200 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97014da5-d64b-4e4a-9b79-fb186d064cd8","Type":"ContainerDied","Data":"26df1d12c13f4183eaf034a4ea07088d601bef48ee43fbb6aceaa6ab5e74f9bf"} Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.883228 4816 scope.go:117] "RemoveContainer" containerID="236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.899196 4816 generic.go:334] "Generic (PLEG): container finished" podID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerID="ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02" exitCode=143 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.899280 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e37be25-de5b-43dd-b779-6fad866f07f3","Type":"ContainerDied","Data":"ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02"} Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.901424 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a4cdd57a-f04a-476a-91a5-144d323b3c29" containerName="nova-scheduler-scheduler" containerID="cri-o://9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549" gracePeriod=30 Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.920268 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "97014da5-d64b-4e4a-9b79-fb186d064cd8" (UID: "97014da5-d64b-4e4a-9b79-fb186d064cd8"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.928888 4816 scope.go:117] "RemoveContainer" containerID="7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.959369 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.959407 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97014da5-d64b-4e4a-9b79-fb186d064cd8-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.959419 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.959432 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgzhc\" (UniqueName: \"kubernetes.io/projected/97014da5-d64b-4e4a-9b79-fb186d064cd8-kube-api-access-pgzhc\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:53 crc kubenswrapper[4816]: I0216 13:24:53.959441 4816 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/97014da5-d64b-4e4a-9b79-fb186d064cd8-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.031895 4816 scope.go:117] "RemoveContainer" containerID="236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897" Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.033266 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897\": container with ID starting with 236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897 not found: ID does not exist" containerID="236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.033312 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897"} err="failed to get container status \"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897\": rpc error: code = NotFound desc = could not find container \"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897\": container with ID starting with 236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897 not found: ID does not exist" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.033342 4816 scope.go:117] "RemoveContainer" containerID="7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0" Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.043952 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0\": container with ID starting with 7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0 not found: ID does not exist" containerID="7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.044007 4816 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0"} err="failed to get container status \"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0\": rpc error: code = NotFound desc = could not find container \"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0\": container with ID starting with 7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0 not found: ID does not exist" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.044101 4816 scope.go:117] "RemoveContainer" containerID="236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.049893 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897"} err="failed to get container status \"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897\": rpc error: code = NotFound desc = could not find container \"236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897\": container with ID starting with 236992a3ab9f676773d249d5b7b56692be77039b537744862fc1f1fc1dfaf897 not found: ID does not exist" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.049950 4816 scope.go:117] "RemoveContainer" containerID="7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.051055 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0"} err="failed to get container status \"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0\": rpc error: code = NotFound desc = could not find container \"7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0\": container with ID starting with 7954620eb9e43ff96ec964596a16dbe2576c497666afadda4eda34b7cfcdf3b0 not found: ID does not exist" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.269796 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.282398 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.308576 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.308975 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerName="dnsmasq-dns" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.308991 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerName="dnsmasq-dns" Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.309008 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerName="init" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309015 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerName="init" Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.309027 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e221f94-9c2b-4f98-bc3b-3342bc071e6c" containerName="nova-manage" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309033 4816 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="2e221f94-9c2b-4f98-bc3b-3342bc071e6c" containerName="nova-manage" Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.309054 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-metadata" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309060 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-metadata" Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.309071 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-log" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309077 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-log" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309263 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1c78c8f-f023-4a68-b9ce-52b09090a1e2" containerName="dnsmasq-dns" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309277 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-log" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309293 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e221f94-9c2b-4f98-bc3b-3342bc071e6c" containerName="nova-manage" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.309300 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" containerName="nova-metadata-metadata" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.310283 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.312310 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.315477 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.322304 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.482205 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.482404 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.482486 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5330f89-cc28-46cd-9119-a2fb00da6220-logs\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.482531 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-config-data\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.482727 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48r82\" (UniqueName: \"kubernetes.io/projected/f5330f89-cc28-46cd-9119-a2fb00da6220-kube-api-access-48r82\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.502899 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.584437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.594219 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5330f89-cc28-46cd-9119-a2fb00da6220-logs\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.594280 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-config-data\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.594436 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48r82\" (UniqueName: \"kubernetes.io/projected/f5330f89-cc28-46cd-9119-a2fb00da6220-kube-api-access-48r82\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.594519 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.601913 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.602492 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5330f89-cc28-46cd-9119-a2fb00da6220-logs\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.603624 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.617465 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-config-data\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.628156 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48r82\" (UniqueName: 
\"kubernetes.io/projected/f5330f89-cc28-46cd-9119-a2fb00da6220-kube-api-access-48r82\") pod \"nova-metadata-0\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") " pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.666581 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.696210 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-scripts\") pod \"25fd1ebc-4f40-4a30-8685-05b050cca498\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.696394 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-config-data\") pod \"25fd1ebc-4f40-4a30-8685-05b050cca498\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.696451 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxfs7\" (UniqueName: \"kubernetes.io/projected/25fd1ebc-4f40-4a30-8685-05b050cca498-kube-api-access-nxfs7\") pod \"25fd1ebc-4f40-4a30-8685-05b050cca498\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.696606 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-combined-ca-bundle\") pod \"25fd1ebc-4f40-4a30-8685-05b050cca498\" (UID: \"25fd1ebc-4f40-4a30-8685-05b050cca498\") " Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.704838 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-scripts" (OuterVolumeSpecName: "scripts") pod "25fd1ebc-4f40-4a30-8685-05b050cca498" (UID: "25fd1ebc-4f40-4a30-8685-05b050cca498"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.704960 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25fd1ebc-4f40-4a30-8685-05b050cca498-kube-api-access-nxfs7" (OuterVolumeSpecName: "kube-api-access-nxfs7") pod "25fd1ebc-4f40-4a30-8685-05b050cca498" (UID: "25fd1ebc-4f40-4a30-8685-05b050cca498"). InnerVolumeSpecName "kube-api-access-nxfs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.733332 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25fd1ebc-4f40-4a30-8685-05b050cca498" (UID: "25fd1ebc-4f40-4a30-8685-05b050cca498"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.739765 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-config-data" (OuterVolumeSpecName: "config-data") pod "25fd1ebc-4f40-4a30-8685-05b050cca498" (UID: "25fd1ebc-4f40-4a30-8685-05b050cca498"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.799591 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.799648 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.799687 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxfs7\" (UniqueName: \"kubernetes.io/projected/25fd1ebc-4f40-4a30-8685-05b050cca498-kube-api-access-nxfs7\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.799704 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fd1ebc-4f40-4a30-8685-05b050cca498-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.921097 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-46pcb" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.921313 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-46pcb" event={"ID":"25fd1ebc-4f40-4a30-8685-05b050cca498","Type":"ContainerDied","Data":"55ebdd3284774287683e61371e31216ab34887eae9272513877a13403df74282"} Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.921701 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55ebdd3284774287683e61371e31216ab34887eae9272513877a13403df74282" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.987160 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 13:24:54 crc kubenswrapper[4816]: E0216 13:24:54.987962 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25fd1ebc-4f40-4a30-8685-05b050cca498" containerName="nova-cell1-conductor-db-sync" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.987989 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="25fd1ebc-4f40-4a30-8685-05b050cca498" containerName="nova-cell1-conductor-db-sync" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.988202 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="25fd1ebc-4f40-4a30-8685-05b050cca498" containerName="nova-cell1-conductor-db-sync" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.993565 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:54 crc kubenswrapper[4816]: I0216 13:24:54.997735 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.000399 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.029733 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.029919 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.030214 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qmxh\" (UniqueName: \"kubernetes.io/projected/911fa155-35a3-49ee-9bc0-f10a8bac544d-kube-api-access-9qmxh\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.131392 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.131548 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qmxh\" (UniqueName: \"kubernetes.io/projected/911fa155-35a3-49ee-9bc0-f10a8bac544d-kube-api-access-9qmxh\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.131601 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.136961 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.150806 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qmxh\" (UniqueName: \"kubernetes.io/projected/911fa155-35a3-49ee-9bc0-f10a8bac544d-kube-api-access-9qmxh\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.151630 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.170169 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:24:55 crc kubenswrapper[4816]: W0216 13:24:55.172789 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5330f89_cc28_46cd_9119_a2fb00da6220.slice/crio-d90f4ac56544a200da5ad544fa476c92a517b1b392e574b9f936f8cce0c47180 WatchSource:0}: Error finding container d90f4ac56544a200da5ad544fa476c92a517b1b392e574b9f936f8cce0c47180: Status 404 returned error can't find the container with id d90f4ac56544a200da5ad544fa476c92a517b1b392e574b9f936f8cce0c47180 Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.352094 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.415491 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97014da5-d64b-4e4a-9b79-fb186d064cd8" path="/var/lib/kubelet/pods/97014da5-d64b-4e4a-9b79-fb186d064cd8/volumes" Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.869290 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 13:24:55 crc kubenswrapper[4816]: W0216 13:24:55.874353 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod911fa155_35a3_49ee_9bc0_f10a8bac544d.slice/crio-fe9a20a39c017c7460f582da4b5d8a0033574c8d81f78048527e45c651561ea2 WatchSource:0}: Error finding container fe9a20a39c017c7460f582da4b5d8a0033574c8d81f78048527e45c651561ea2: Status 404 returned error can't find the container with id fe9a20a39c017c7460f582da4b5d8a0033574c8d81f78048527e45c651561ea2 Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.935997 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f5330f89-cc28-46cd-9119-a2fb00da6220","Type":"ContainerStarted","Data":"4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee"} Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.936044 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f5330f89-cc28-46cd-9119-a2fb00da6220","Type":"ContainerStarted","Data":"e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7"} Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.936057 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f5330f89-cc28-46cd-9119-a2fb00da6220","Type":"ContainerStarted","Data":"d90f4ac56544a200da5ad544fa476c92a517b1b392e574b9f936f8cce0c47180"} Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.938720 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"911fa155-35a3-49ee-9bc0-f10a8bac544d","Type":"ContainerStarted","Data":"fe9a20a39c017c7460f582da4b5d8a0033574c8d81f78048527e45c651561ea2"} Feb 16 13:24:55 crc kubenswrapper[4816]: I0216 13:24:55.963314 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.963294707 
podStartE2EDuration="1.963294707s" podCreationTimestamp="2026-02-16 13:24:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:55.959078922 +0000 UTC m=+1295.285792660" watchObservedRunningTime="2026-02-16 13:24:55.963294707 +0000 UTC m=+1295.290008445" Feb 16 13:24:56 crc kubenswrapper[4816]: E0216 13:24:56.683215 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:24:56 crc kubenswrapper[4816]: E0216 13:24:56.685312 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:24:56 crc kubenswrapper[4816]: E0216 13:24:56.686761 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:24:56 crc kubenswrapper[4816]: E0216 13:24:56.686813 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a4cdd57a-f04a-476a-91a5-144d323b3c29" containerName="nova-scheduler-scheduler" Feb 16 13:24:56 crc kubenswrapper[4816]: I0216 13:24:56.949543 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"911fa155-35a3-49ee-9bc0-f10a8bac544d","Type":"ContainerStarted","Data":"67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213"} Feb 16 13:24:56 crc kubenswrapper[4816]: I0216 13:24:56.975430 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.975403588 podStartE2EDuration="2.975403588s" podCreationTimestamp="2026-02-16 13:24:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:24:56.970567946 +0000 UTC m=+1296.297281674" watchObservedRunningTime="2026-02-16 13:24:56.975403588 +0000 UTC m=+1296.302117336" Feb 16 13:24:57 crc kubenswrapper[4816]: I0216 13:24:57.966451 4816 generic.go:334] "Generic (PLEG): container finished" podID="a4cdd57a-f04a-476a-91a5-144d323b3c29" containerID="9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549" exitCode=0 Feb 16 13:24:57 crc kubenswrapper[4816]: I0216 13:24:57.966544 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a4cdd57a-f04a-476a-91a5-144d323b3c29","Type":"ContainerDied","Data":"9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549"} Feb 16 13:24:57 crc kubenswrapper[4816]: I0216 13:24:57.967552 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 16 13:24:58 crc kubenswrapper[4816]: 
I0216 13:24:58.282168 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.384031 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-config-data\") pod \"a4cdd57a-f04a-476a-91a5-144d323b3c29\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.384083 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-combined-ca-bundle\") pod \"a4cdd57a-f04a-476a-91a5-144d323b3c29\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.384178 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbfdm\" (UniqueName: \"kubernetes.io/projected/a4cdd57a-f04a-476a-91a5-144d323b3c29-kube-api-access-dbfdm\") pod \"a4cdd57a-f04a-476a-91a5-144d323b3c29\" (UID: \"a4cdd57a-f04a-476a-91a5-144d323b3c29\") " Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.390121 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4cdd57a-f04a-476a-91a5-144d323b3c29-kube-api-access-dbfdm" (OuterVolumeSpecName: "kube-api-access-dbfdm") pod "a4cdd57a-f04a-476a-91a5-144d323b3c29" (UID: "a4cdd57a-f04a-476a-91a5-144d323b3c29"). InnerVolumeSpecName "kube-api-access-dbfdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.419059 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a4cdd57a-f04a-476a-91a5-144d323b3c29" (UID: "a4cdd57a-f04a-476a-91a5-144d323b3c29"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.419803 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-config-data" (OuterVolumeSpecName: "config-data") pod "a4cdd57a-f04a-476a-91a5-144d323b3c29" (UID: "a4cdd57a-f04a-476a-91a5-144d323b3c29"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.487090 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.487133 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cdd57a-f04a-476a-91a5-144d323b3c29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.487148 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbfdm\" (UniqueName: \"kubernetes.io/projected/a4cdd57a-f04a-476a-91a5-144d323b3c29-kube-api-access-dbfdm\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.959566 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.980355 4816 generic.go:334] "Generic (PLEG): container finished" podID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerID="9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076" exitCode=0 Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.980489 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.984720 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e37be25-de5b-43dd-b779-6fad866f07f3","Type":"ContainerDied","Data":"9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076"} Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.984777 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e37be25-de5b-43dd-b779-6fad866f07f3","Type":"ContainerDied","Data":"9a11088911298a7596f4bd7ac8e5eb0ed517775d6cbb271e7800c62b608731b4"} Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.984802 4816 scope.go:117] "RemoveContainer" containerID="9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076" Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.993847 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a4cdd57a-f04a-476a-91a5-144d323b3c29","Type":"ContainerDied","Data":"5ffb43fcb7ce9457bf59c6f13de8c1e90231ae0cac0afcba0c251f7dff37342b"} Feb 16 13:24:58 crc kubenswrapper[4816]: I0216 13:24:58.995403 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.011232 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-combined-ca-bundle\") pod \"3e37be25-de5b-43dd-b779-6fad866f07f3\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.011287 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-config-data\") pod \"3e37be25-de5b-43dd-b779-6fad866f07f3\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.011388 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e37be25-de5b-43dd-b779-6fad866f07f3-logs\") pod \"3e37be25-de5b-43dd-b779-6fad866f07f3\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.011451 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttdsh\" (UniqueName: \"kubernetes.io/projected/3e37be25-de5b-43dd-b779-6fad866f07f3-kube-api-access-ttdsh\") pod \"3e37be25-de5b-43dd-b779-6fad866f07f3\" (UID: \"3e37be25-de5b-43dd-b779-6fad866f07f3\") " Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.012481 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e37be25-de5b-43dd-b779-6fad866f07f3-logs" (OuterVolumeSpecName: "logs") pod "3e37be25-de5b-43dd-b779-6fad866f07f3" (UID: "3e37be25-de5b-43dd-b779-6fad866f07f3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.014114 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e37be25-de5b-43dd-b779-6fad866f07f3-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.027556 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e37be25-de5b-43dd-b779-6fad866f07f3-kube-api-access-ttdsh" (OuterVolumeSpecName: "kube-api-access-ttdsh") pod "3e37be25-de5b-43dd-b779-6fad866f07f3" (UID: "3e37be25-de5b-43dd-b779-6fad866f07f3"). InnerVolumeSpecName "kube-api-access-ttdsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.039707 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-config-data" (OuterVolumeSpecName: "config-data") pod "3e37be25-de5b-43dd-b779-6fad866f07f3" (UID: "3e37be25-de5b-43dd-b779-6fad866f07f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.086802 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e37be25-de5b-43dd-b779-6fad866f07f3" (UID: "3e37be25-de5b-43dd-b779-6fad866f07f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.115890 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttdsh\" (UniqueName: \"kubernetes.io/projected/3e37be25-de5b-43dd-b779-6fad866f07f3-kube-api-access-ttdsh\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.115945 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.115960 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e37be25-de5b-43dd-b779-6fad866f07f3-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.151744 4816 scope.go:117] "RemoveContainer" containerID="ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.168876 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.180467 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.183409 4816 scope.go:117] "RemoveContainer" containerID="9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076" Feb 16 13:24:59 crc kubenswrapper[4816]: E0216 13:24:59.185333 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076\": container with ID starting with 9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076 not found: ID does not exist" 
containerID="9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.185376 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076"} err="failed to get container status \"9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076\": rpc error: code = NotFound desc = could not find container \"9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076\": container with ID starting with 9cdb8319cd3bf090a9215c24cf7674351c8e822c4306a073543029e8505d7076 not found: ID does not exist" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.185405 4816 scope.go:117] "RemoveContainer" containerID="ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02" Feb 16 13:24:59 crc kubenswrapper[4816]: E0216 13:24:59.189153 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02\": container with ID starting with ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02 not found: ID does not exist" containerID="ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.189181 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02"} err="failed to get container status \"ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02\": rpc error: code = NotFound desc = could not find container \"ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02\": container with ID starting with ae7dacdf4230639041200ebbd1ad7e3df2203fa93908c2eba59e89a12b3aee02 not found: ID does not exist" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.189195 4816 scope.go:117] "RemoveContainer" containerID="9000a125f98582a9cd007bbfa4d8257b42088c4d96b72af88e7c44130ed6d549" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.191019 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: E0216 13:24:59.191432 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-api" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.191451 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-api" Feb 16 13:24:59 crc kubenswrapper[4816]: E0216 13:24:59.191465 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cdd57a-f04a-476a-91a5-144d323b3c29" containerName="nova-scheduler-scheduler" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.191471 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cdd57a-f04a-476a-91a5-144d323b3c29" containerName="nova-scheduler-scheduler" Feb 16 13:24:59 crc kubenswrapper[4816]: E0216 13:24:59.191491 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-log" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.191497 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-log" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.191649 4816 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-log" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.191676 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" containerName="nova-api-api" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.191695 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4cdd57a-f04a-476a-91a5-144d323b3c29" containerName="nova-scheduler-scheduler" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.192305 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.203390 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.204407 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.217913 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.217958 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-config-data\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.218004 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hpdm\" (UniqueName: \"kubernetes.io/projected/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-kube-api-access-8hpdm\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.317595 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.320390 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hpdm\" (UniqueName: \"kubernetes.io/projected/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-kube-api-access-8hpdm\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.320704 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.320766 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-config-data\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.324643 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.326253 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-config-data\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.330800 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.359818 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.362455 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.364991 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.372116 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hpdm\" (UniqueName: \"kubernetes.io/projected/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-kube-api-access-8hpdm\") pod \"nova-scheduler-0\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") " pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.372677 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.410063 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e37be25-de5b-43dd-b779-6fad866f07f3" path="/var/lib/kubelet/pods/3e37be25-de5b-43dd-b779-6fad866f07f3/volumes" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.410715 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4cdd57a-f04a-476a-91a5-144d323b3c29" path="/var/lib/kubelet/pods/a4cdd57a-f04a-476a-91a5-144d323b3c29/volumes" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.422960 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-config-data\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.423047 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dncqx\" (UniqueName: \"kubernetes.io/projected/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-kube-api-access-dncqx\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.423170 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.423307 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-logs\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.513358 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.526229 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.526415 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-logs\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.526472 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-config-data\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.526547 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dncqx\" (UniqueName: \"kubernetes.io/projected/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-kube-api-access-dncqx\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.526940 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-logs\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.530136 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.530327 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-config-data\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.550433 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dncqx\" (UniqueName: \"kubernetes.io/projected/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-kube-api-access-dncqx\") pod \"nova-api-0\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.667132 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.667611 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.736432 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.862866 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 16 13:24:59 crc kubenswrapper[4816]: I0216 13:24:59.948408 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:25:00 crc kubenswrapper[4816]: I0216 13:25:00.009566 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f","Type":"ContainerStarted","Data":"d9e5a902fc9aaf1fd6ad4a66d09aa352b431c3bfd239f29b92dea43d20055448"} Feb 16 13:25:00 crc kubenswrapper[4816]: W0216 13:25:00.206965 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa01bd5c_1bc2_423a_bfa3_a0411bea1afc.slice/crio-d45129601f57fa96dd03fedb977abd763cb6775a4116767eee15f40aed3b4ff6 WatchSource:0}: Error finding container d45129601f57fa96dd03fedb977abd763cb6775a4116767eee15f40aed3b4ff6: Status 404 returned error can't find the container with id d45129601f57fa96dd03fedb977abd763cb6775a4116767eee15f40aed3b4ff6 Feb 16 13:25:00 crc kubenswrapper[4816]: I0216 13:25:00.207158 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:01 crc kubenswrapper[4816]: I0216 13:25:01.026643 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f","Type":"ContainerStarted","Data":"83122580674bda87f2a57af7d86fc48d318d72637c6ef769957dc9804b92ab66"} Feb 16 13:25:01 crc kubenswrapper[4816]: I0216 13:25:01.033705 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc","Type":"ContainerStarted","Data":"2421e0c16ddc2a0653671ff82c882fdcc48898092dd506013711b5886e1eb877"} Feb 16 13:25:01 crc kubenswrapper[4816]: I0216 13:25:01.033769 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc","Type":"ContainerStarted","Data":"4bb52d950b8046a9e69a5960c76c6af1619b06ddc7d4e281fdf48cfbcb9eceb0"} Feb 16 13:25:01 crc kubenswrapper[4816]: I0216 13:25:01.033781 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc","Type":"ContainerStarted","Data":"d45129601f57fa96dd03fedb977abd763cb6775a4116767eee15f40aed3b4ff6"} Feb 16 13:25:01 crc kubenswrapper[4816]: I0216 13:25:01.070820 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.07078753 podStartE2EDuration="2.07078753s" podCreationTimestamp="2026-02-16 13:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:01.04810972 +0000 UTC m=+1300.374823468" watchObservedRunningTime="2026-02-16 13:25:01.07078753 +0000 UTC m=+1300.397501268" Feb 16 13:25:01 crc kubenswrapper[4816]: I0216 13:25:01.080408 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.080373341 podStartE2EDuration="2.080373341s" podCreationTimestamp="2026-02-16 13:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:01.065636699 +0000 
UTC m=+1300.392350437" watchObservedRunningTime="2026-02-16 13:25:01.080373341 +0000 UTC m=+1300.407087079" Feb 16 13:25:03 crc kubenswrapper[4816]: I0216 13:25:03.748156 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:25:03 crc kubenswrapper[4816]: I0216 13:25:03.748899 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" containerName="kube-state-metrics" containerID="cri-o://612e60441780268db4eb25969864048ff43689c774f46ef5ce5d5ba8b8ffe540" gracePeriod=30 Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.066566 4816 generic.go:334] "Generic (PLEG): container finished" podID="ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" containerID="612e60441780268db4eb25969864048ff43689c774f46ef5ce5d5ba8b8ffe540" exitCode=2 Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.066915 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f","Type":"ContainerDied","Data":"612e60441780268db4eb25969864048ff43689c774f46ef5ce5d5ba8b8ffe540"} Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.290940 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.453995 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbnc4\" (UniqueName: \"kubernetes.io/projected/ccaf33e6-b7e7-42b7-9ab5-dea152b2853f-kube-api-access-jbnc4\") pod \"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f\" (UID: \"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f\") " Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.460205 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccaf33e6-b7e7-42b7-9ab5-dea152b2853f-kube-api-access-jbnc4" (OuterVolumeSpecName: "kube-api-access-jbnc4") pod "ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" (UID: "ccaf33e6-b7e7-42b7-9ab5-dea152b2853f"). InnerVolumeSpecName "kube-api-access-jbnc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.514509 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.556052 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbnc4\" (UniqueName: \"kubernetes.io/projected/ccaf33e6-b7e7-42b7-9ab5-dea152b2853f-kube-api-access-jbnc4\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.667107 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 16 13:25:04 crc kubenswrapper[4816]: I0216 13:25:04.667167 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.094252 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ccaf33e6-b7e7-42b7-9ab5-dea152b2853f","Type":"ContainerDied","Data":"eff8effdcc3f74fcdbe83e242ead2381ef59e69e85f7c85622fce7b8c76e3489"} Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.094364 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.094510 4816 scope.go:117] "RemoveContainer" containerID="612e60441780268db4eb25969864048ff43689c774f46ef5ce5d5ba8b8ffe540" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.144610 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.168258 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.196786 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:25:05 crc kubenswrapper[4816]: E0216 13:25:05.197252 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" containerName="kube-state-metrics" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.197274 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" containerName="kube-state-metrics" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.197511 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" containerName="kube-state-metrics" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.203475 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.206776 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.207321 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.233708 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.378676 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sbrx\" (UniqueName: \"kubernetes.io/projected/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-api-access-8sbrx\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.378749 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.378771 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.379574 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: 
\"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.384359 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.410895 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" path="/var/lib/kubelet/pods/ccaf33e6-b7e7-42b7-9ab5-dea152b2853f/volumes" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.482300 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sbrx\" (UniqueName: \"kubernetes.io/projected/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-api-access-8sbrx\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.482414 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.482458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.482730 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.488254 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.490354 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.492436 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.508459 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sbrx\" (UniqueName: \"kubernetes.io/projected/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-api-access-8sbrx\") pod \"kube-state-metrics-0\" (UID: 
\"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.540596 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.680481 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 16 13:25:05 crc kubenswrapper[4816]: I0216 13:25:05.680805 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 16 13:25:06 crc kubenswrapper[4816]: I0216 13:25:06.121068 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:25:06 crc kubenswrapper[4816]: W0216 13:25:06.137930 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28ab0c0d_5c1e_403d_a3d9_234a5c723884.slice/crio-ce6cf04ac5ff535df9636eaa8fd53db8b87db6b4f6352539f42ea21e3840f097 WatchSource:0}: Error finding container ce6cf04ac5ff535df9636eaa8fd53db8b87db6b4f6352539f42ea21e3840f097: Status 404 returned error can't find the container with id ce6cf04ac5ff535df9636eaa8fd53db8b87db6b4f6352539f42ea21e3840f097 Feb 16 13:25:06 crc kubenswrapper[4816]: I0216 13:25:06.269582 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:06 crc kubenswrapper[4816]: I0216 13:25:06.270238 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-central-agent" containerID="cri-o://368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9" gracePeriod=30 Feb 16 13:25:06 crc kubenswrapper[4816]: I0216 13:25:06.270302 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="proxy-httpd" containerID="cri-o://b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703" gracePeriod=30 Feb 16 13:25:06 crc kubenswrapper[4816]: I0216 13:25:06.270381 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="sg-core" containerID="cri-o://abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429" gracePeriod=30 Feb 16 13:25:06 crc kubenswrapper[4816]: I0216 13:25:06.270443 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-notification-agent" containerID="cri-o://14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec" gracePeriod=30 Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.161632 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"28ab0c0d-5c1e-403d-a3d9-234a5c723884","Type":"ContainerStarted","Data":"eaecabde41b4e021829e9c54ac76b1d41288afa9788f12cfc4efc87303bf69b9"} Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.161953 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28ab0c0d-5c1e-403d-a3d9-234a5c723884","Type":"ContainerStarted","Data":"ce6cf04ac5ff535df9636eaa8fd53db8b87db6b4f6352539f42ea21e3840f097"} Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.161969 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.165221 4816 generic.go:334] "Generic (PLEG): container finished" podID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerID="b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703" exitCode=0 Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.165263 4816 generic.go:334] "Generic (PLEG): container finished" podID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerID="abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429" exitCode=2 Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.165273 4816 generic.go:334] "Generic (PLEG): container finished" podID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerID="368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9" exitCode=0 Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.165301 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerDied","Data":"b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703"} Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.165334 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerDied","Data":"abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429"} Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.165349 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerDied","Data":"368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9"} Feb 16 13:25:07 crc kubenswrapper[4816]: I0216 13:25:07.177744 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.729338528 podStartE2EDuration="2.177721685s" podCreationTimestamp="2026-02-16 13:25:05 +0000 UTC" firstStartedPulling="2026-02-16 13:25:06.140309101 +0000 UTC m=+1305.467022829" lastFinishedPulling="2026-02-16 13:25:06.588692258 +0000 UTC m=+1305.915405986" observedRunningTime="2026-02-16 13:25:07.174115867 +0000 UTC m=+1306.500829595" watchObservedRunningTime="2026-02-16 13:25:07.177721685 +0000 UTC m=+1306.504435413" Feb 16 13:25:08 crc kubenswrapper[4816]: I0216 13:25:08.982285 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="ccaf33e6-b7e7-42b7-9ab5-dea152b2853f" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.102:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.089193 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.089193 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.144790 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-run-httpd\") pod \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") "
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.144889 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-sg-core-conf-yaml\") pod \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") "
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.144974 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-scripts\") pod \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") "
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.145004 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-log-httpd\") pod \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") "
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.145045 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6xw9\" (UniqueName: \"kubernetes.io/projected/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-kube-api-access-g6xw9\") pod \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") "
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.145114 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-combined-ca-bundle\") pod \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") "
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.145175 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-config-data\") pod \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\" (UID: \"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d\") "
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.147757 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" (UID: "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.148173 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" (UID: "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d"). InnerVolumeSpecName "log-httpd".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.158901 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-scripts" (OuterVolumeSpecName: "scripts") pod "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" (UID: "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.158947 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-kube-api-access-g6xw9" (OuterVolumeSpecName: "kube-api-access-g6xw9") pod "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" (UID: "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d"). InnerVolumeSpecName "kube-api-access-g6xw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.190183 4816 generic.go:334] "Generic (PLEG): container finished" podID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerID="14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec" exitCode=0 Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.190224 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerDied","Data":"14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec"} Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.190251 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c7e2473-f87b-42be-a0e2-92ab00fdbd7d","Type":"ContainerDied","Data":"efa64530580196be782ea4434e6a1b1263a196900be124467c0d9941a0d846c7"} Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.190268 4816 scope.go:117] "RemoveContainer" containerID="b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.190400 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.191831 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" (UID: "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.219806 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" (UID: "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.246961 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.247157 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.247217 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6xw9\" (UniqueName: \"kubernetes.io/projected/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-kube-api-access-g6xw9\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.247307 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.247389 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.247450 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.254347 4816 scope.go:117] "RemoveContainer" containerID="abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.272297 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-config-data" (OuterVolumeSpecName: "config-data") pod "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" (UID: "5c7e2473-f87b-42be-a0e2-92ab00fdbd7d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.276035 4816 scope.go:117] "RemoveContainer" containerID="14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.303854 4816 scope.go:117] "RemoveContainer" containerID="368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.331747 4816 scope.go:117] "RemoveContainer" containerID="b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703" Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.332230 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703\": container with ID starting with b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703 not found: ID does not exist" containerID="b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.332337 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703"} err="failed to get container status \"b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703\": rpc error: code = NotFound desc = could not find container \"b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703\": container with ID starting with b8a46a77c5129fb17b13300aaea38afa7ac2c23d74354dd0cb627f1bf9d67703 not found: ID does not exist" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.332412 4816 scope.go:117] "RemoveContainer" containerID="abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429" Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.333258 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429\": container with ID starting with abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429 not found: ID does not exist" containerID="abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.333345 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429"} err="failed to get container status \"abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429\": rpc error: code = NotFound desc = could not find container \"abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429\": container with ID starting with abcb0f1ba86af301dbf5818bfa894c709a8840fde4ecc9f2d643e6553cf7f429 not found: ID does not exist" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.333418 4816 scope.go:117] "RemoveContainer" containerID="14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec" Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.333831 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec\": container with ID starting with 14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec not found: ID does not exist" containerID="14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec" Feb 16 13:25:09 crc 
kubenswrapper[4816]: I0216 13:25:09.333917 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec"} err="failed to get container status \"14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec\": rpc error: code = NotFound desc = could not find container \"14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec\": container with ID starting with 14c60c06b0790015a374ee0c18d8e3e13cd3efd51255aab5ceeadbdbf384beec not found: ID does not exist" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.333983 4816 scope.go:117] "RemoveContainer" containerID="368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9" Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.334369 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9\": container with ID starting with 368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9 not found: ID does not exist" containerID="368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.334449 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9"} err="failed to get container status \"368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9\": rpc error: code = NotFound desc = could not find container \"368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9\": container with ID starting with 368266352f40a4b4e3cee9e031dc2f2eaf9027a065a66b5f2cf8de691a5d9bf9 not found: ID does not exist" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.362869 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.514497 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.516667 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.525037 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.542401 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.542856 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-central-agent" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.542874 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-central-agent" Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.542902 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="sg-core" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.542909 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="sg-core" Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.542929 4816 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-notification-agent" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.542937 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-notification-agent" Feb 16 13:25:09 crc kubenswrapper[4816]: E0216 13:25:09.542964 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="proxy-httpd" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.542970 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="proxy-httpd" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.543134 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="sg-core" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.543146 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-central-agent" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.543158 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="proxy-httpd" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.543167 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" containerName="ceilometer-notification-agent" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.544754 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.554224 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582025 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582086 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582112 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-run-httpd\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582140 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-scripts\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582182 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-log-httpd\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582209 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn2q5\" (UniqueName: \"kubernetes.io/projected/48c0747c-1464-4d37-9b5b-3583ae413353-kube-api-access-zn2q5\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582230 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-config-data\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.582337 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.585152 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.585326 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.586299 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.618509 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684552 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684631 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684682 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-run-httpd\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684727 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-scripts\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684788 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-log-httpd\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684827 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn2q5\" (UniqueName: \"kubernetes.io/projected/48c0747c-1464-4d37-9b5b-3583ae413353-kube-api-access-zn2q5\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684862 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-config-data\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.684925 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.685573 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-run-httpd\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.685874 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-log-httpd\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.690278 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-config-data\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.690482 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.690494 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.691384 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.696569 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-scripts\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.704050 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn2q5\" (UniqueName: \"kubernetes.io/projected/48c0747c-1464-4d37-9b5b-3583ae413353-kube-api-access-zn2q5\") pod \"ceilometer-0\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " pod="openstack/ceilometer-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.736356 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.736822 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 13:25:09 crc kubenswrapper[4816]: I0216 13:25:09.894994 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:10 crc kubenswrapper[4816]: I0216 13:25:10.236874 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 16 13:25:10 crc kubenswrapper[4816]: I0216 13:25:10.532349 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:10 crc kubenswrapper[4816]: W0216 13:25:10.543113 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48c0747c_1464_4d37_9b5b_3583ae413353.slice/crio-6c30b0aabc6bde73faff064849bbc44391acced1f6ed463379605b6edd1429cd WatchSource:0}: Error finding container 6c30b0aabc6bde73faff064849bbc44391acced1f6ed463379605b6edd1429cd: Status 404 returned error can't find the container with id 6c30b0aabc6bde73faff064849bbc44391acced1f6ed463379605b6edd1429cd Feb 16 13:25:10 crc kubenswrapper[4816]: I0216 13:25:10.546298 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 13:25:10 crc kubenswrapper[4816]: I0216 13:25:10.818885 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 13:25:10 crc kubenswrapper[4816]: I0216 13:25:10.818885 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 13:25:11 crc kubenswrapper[4816]: I0216 13:25:11.230059 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerStarted","Data":"6c30b0aabc6bde73faff064849bbc44391acced1f6ed463379605b6edd1429cd"} Feb 16 13:25:11 crc kubenswrapper[4816]: I0216 13:25:11.418019 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c7e2473-f87b-42be-a0e2-92ab00fdbd7d" path="/var/lib/kubelet/pods/5c7e2473-f87b-42be-a0e2-92ab00fdbd7d/volumes" Feb 16 13:25:12 crc kubenswrapper[4816]: I0216 13:25:12.241714 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerStarted","Data":"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23"} Feb 16 13:25:12 crc kubenswrapper[4816]: I0216 13:25:12.242820 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerStarted","Data":"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2"} Feb 16 13:25:13 crc kubenswrapper[4816]: I0216 13:25:13.254197 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerStarted","Data":"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1"} Feb 16 13:25:14 crc kubenswrapper[4816]: I0216 13:25:14.268541 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerStarted","Data":"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12"} Feb 16 13:25:14 crc kubenswrapper[4816]: I0216 13:25:14.269153 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 13:25:14 crc kubenswrapper[4816]: I0216 13:25:14.671899 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 16 13:25:14 crc kubenswrapper[4816]: I0216 13:25:14.674336 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 16 13:25:14 crc kubenswrapper[4816]: I0216 13:25:14.679454 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 13:25:14 crc kubenswrapper[4816]: I0216 13:25:14.697578 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.255702765 podStartE2EDuration="5.697560241s" podCreationTimestamp="2026-02-16 13:25:09 +0000 UTC" firstStartedPulling="2026-02-16 13:25:10.546065633 +0000 UTC m=+1309.872779361" lastFinishedPulling="2026-02-16 13:25:13.987923109 +0000 UTC m=+1313.314636837" observedRunningTime="2026-02-16 13:25:14.296288131 +0000 UTC m=+1313.623001859" watchObservedRunningTime="2026-02-16 13:25:14.697560241 +0000 UTC m=+1314.024273979" Feb 16 13:25:15 crc kubenswrapper[4816]: I0216 13:25:15.286626 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 13:25:15 crc kubenswrapper[4816]: I0216 13:25:15.556004 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.277281 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.277281 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.309457 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-combined-ca-bundle\") pod \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") "
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.309560 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsjcz\" (UniqueName: \"kubernetes.io/projected/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-kube-api-access-wsjcz\") pod \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") "
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.309692 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-config-data\") pod \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\" (UID: \"8a176030-0545-4a2b-b9c8-cce6a69dfdbe\") "
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.317782 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-kube-api-access-wsjcz" (OuterVolumeSpecName: "kube-api-access-wsjcz") pod "8a176030-0545-4a2b-b9c8-cce6a69dfdbe" (UID: "8a176030-0545-4a2b-b9c8-cce6a69dfdbe"). InnerVolumeSpecName "kube-api-access-wsjcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.326802 4816 generic.go:334] "Generic (PLEG): container finished" podID="8a176030-0545-4a2b-b9c8-cce6a69dfdbe" containerID="3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e" exitCode=137
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.327593 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8a176030-0545-4a2b-b9c8-cce6a69dfdbe","Type":"ContainerDied","Data":"3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e"}
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.327727 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8a176030-0545-4a2b-b9c8-cce6a69dfdbe","Type":"ContainerDied","Data":"30d9610102c6f411831f23e50d4397f71defe39fc5789736d55487f2818e1509"}
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.327771 4816 scope.go:117] "RemoveContainer" containerID="3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e"
Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.327608 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
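The exitCode=137 above follows the shell convention 128+N for death by signal N: SIGKILL (9), meaning the novncproxy container was still running when its grace period lapsed (or was killed outright). Compare the exitCode=0 and exitCode=2 exits earlier, and the exitCode=143 (128+15, SIGTERM honored) that nova-api-log reports near the end of this log. A one-function decoder:

package main

import (
	"fmt"
	"syscall"
)

// describeExitCode decodes the 128+N convention container runtimes use to
// report signal deaths.
func describeExitCode(code int) string {
	switch {
	case code == 0:
		return "exited cleanly"
	case code > 128:
		sig := syscall.Signal(code - 128)
		return fmt.Sprintf("killed by signal %d (%v)", code-128, sig)
	default:
		return fmt.Sprintf("exited with application error %d", code)
	}
}

func main() {
	// The four codes observed in this log.
	for _, c := range []int{0, 2, 137, 143} {
		fmt.Printf("exitCode=%d: %s\n", c, describeExitCode(c))
	}
}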
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.355297 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-config-data" (OuterVolumeSpecName: "config-data") pod "8a176030-0545-4a2b-b9c8-cce6a69dfdbe" (UID: "8a176030-0545-4a2b-b9c8-cce6a69dfdbe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.411529 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsjcz\" (UniqueName: \"kubernetes.io/projected/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-kube-api-access-wsjcz\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.411862 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.411873 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a176030-0545-4a2b-b9c8-cce6a69dfdbe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.422416 4816 scope.go:117] "RemoveContainer" containerID="3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e" Feb 16 13:25:17 crc kubenswrapper[4816]: E0216 13:25:17.601273 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e\": container with ID starting with 3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e not found: ID does not exist" containerID="3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.601326 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e"} err="failed to get container status \"3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e\": rpc error: code = NotFound desc = could not find container \"3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e\": container with ID starting with 3b24e5ad21971685372e394e7b9357108fc528f45dae241d32c6153d5c399b0e not found: ID does not exist" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.654329 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.662170 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.675295 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:25:17 crc kubenswrapper[4816]: E0216 13:25:17.675736 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a176030-0545-4a2b-b9c8-cce6a69dfdbe" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.675754 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a176030-0545-4a2b-b9c8-cce6a69dfdbe" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.675978 4816 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="8a176030-0545-4a2b-b9c8-cce6a69dfdbe" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.676613 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.685314 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.685457 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.685544 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.699021 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.700541 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.700606 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.700709 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.700797 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwr6x\" (UniqueName: \"kubernetes.io/projected/6ba4481b-c2a7-4156-b054-8179b24cdb66-kube-api-access-vwr6x\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.700843 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.803123 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.803185 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwr6x\" 
(UniqueName: \"kubernetes.io/projected/6ba4481b-c2a7-4156-b054-8179b24cdb66-kube-api-access-vwr6x\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.803218 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.803287 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.803318 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.807170 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.807279 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.807496 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.807735 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:17 crc kubenswrapper[4816]: I0216 13:25:17.825147 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwr6x\" (UniqueName: \"kubernetes.io/projected/6ba4481b-c2a7-4156-b054-8179b24cdb66-kube-api-access-vwr6x\") pod \"nova-cell1-novncproxy-0\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:18 crc kubenswrapper[4816]: I0216 13:25:18.002922 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:18 crc kubenswrapper[4816]: I0216 13:25:18.539898 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.417357 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a176030-0545-4a2b-b9c8-cce6a69dfdbe" path="/var/lib/kubelet/pods/8a176030-0545-4a2b-b9c8-cce6a69dfdbe/volumes" Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.418436 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6ba4481b-c2a7-4156-b054-8179b24cdb66","Type":"ContainerStarted","Data":"227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3"} Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.418471 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6ba4481b-c2a7-4156-b054-8179b24cdb66","Type":"ContainerStarted","Data":"6fda7aa0a6da8f35a801453e464e8ef6d330f708295fe46fc3a74f81ddc9a2cb"} Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.438100 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.438079357 podStartE2EDuration="2.438079357s" podCreationTimestamp="2026-02-16 13:25:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:19.433692727 +0000 UTC m=+1318.760406455" watchObservedRunningTime="2026-02-16 13:25:19.438079357 +0000 UTC m=+1318.764793085" Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.740198 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.740689 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.745391 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 16 13:25:19 crc kubenswrapper[4816]: I0216 13:25:19.746209 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.424798 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.428709 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.659718 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-smv6j"] Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.661306 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.673886 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-smv6j"] Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.751474 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.751522 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-config\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.751592 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.751630 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.751679 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.751706 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjzck\" (UniqueName: \"kubernetes.io/projected/a2ea4453-2b42-409d-bac8-b317e43dcf6a-kube-api-access-bjzck\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.853458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.853519 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-config\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.853588 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.853643 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.853713 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.853741 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjzck\" (UniqueName: \"kubernetes.io/projected/a2ea4453-2b42-409d-bac8-b317e43dcf6a-kube-api-access-bjzck\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.855912 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.856474 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-config\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.857067 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.857568 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.858178 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.881080 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjzck\" (UniqueName: 
\"kubernetes.io/projected/a2ea4453-2b42-409d-bac8-b317e43dcf6a-kube-api-access-bjzck\") pod \"dnsmasq-dns-5c7b6c5df9-smv6j\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:20 crc kubenswrapper[4816]: I0216 13:25:20.985611 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:21 crc kubenswrapper[4816]: I0216 13:25:21.482476 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-smv6j"] Feb 16 13:25:22 crc kubenswrapper[4816]: I0216 13:25:22.462544 4816 generic.go:334] "Generic (PLEG): container finished" podID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerID="d43de8d62d0459a351f147af09bf04360bdb291a156206a415357ae792d24702" exitCode=0 Feb 16 13:25:22 crc kubenswrapper[4816]: I0216 13:25:22.462604 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" event={"ID":"a2ea4453-2b42-409d-bac8-b317e43dcf6a","Type":"ContainerDied","Data":"d43de8d62d0459a351f147af09bf04360bdb291a156206a415357ae792d24702"} Feb 16 13:25:22 crc kubenswrapper[4816]: I0216 13:25:22.462955 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" event={"ID":"a2ea4453-2b42-409d-bac8-b317e43dcf6a","Type":"ContainerStarted","Data":"15cb8fe97ee36652a0b3cda7e7ab73a13b64e8f54bb9c313592b37634f035337"} Feb 16 13:25:23 crc kubenswrapper[4816]: I0216 13:25:23.043240 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:23 crc kubenswrapper[4816]: I0216 13:25:23.473056 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" event={"ID":"a2ea4453-2b42-409d-bac8-b317e43dcf6a","Type":"ContainerStarted","Data":"dbd428f503f966eeb3226bb68ba1f69e1b42e9b75e71221255c203cb87e57a4a"} Feb 16 13:25:23 crc kubenswrapper[4816]: I0216 13:25:23.473390 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:23 crc kubenswrapper[4816]: I0216 13:25:23.497628 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" podStartSLOduration=3.497609353 podStartE2EDuration="3.497609353s" podCreationTimestamp="2026-02-16 13:25:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:23.491184567 +0000 UTC m=+1322.817898295" watchObservedRunningTime="2026-02-16 13:25:23.497609353 +0000 UTC m=+1322.824323081" Feb 16 13:25:23 crc kubenswrapper[4816]: I0216 13:25:23.828940 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:23 crc kubenswrapper[4816]: I0216 13:25:23.829491 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-log" containerID="cri-o://4bb52d950b8046a9e69a5960c76c6af1619b06ddc7d4e281fdf48cfbcb9eceb0" gracePeriod=30 Feb 16 13:25:23 crc kubenswrapper[4816]: I0216 13:25:23.829829 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-api" containerID="cri-o://2421e0c16ddc2a0653671ff82c882fdcc48898092dd506013711b5886e1eb877" gracePeriod=30 Feb 16 13:25:24 crc 
kubenswrapper[4816]: I0216 13:25:24.102996 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.103640 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-central-agent" containerID="cri-o://4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2" gracePeriod=30 Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.103816 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="proxy-httpd" containerID="cri-o://d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12" gracePeriod=30 Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.103863 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="sg-core" containerID="cri-o://2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1" gracePeriod=30 Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.103909 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-notification-agent" containerID="cri-o://fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23" gracePeriod=30 Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.111687 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": read tcp 10.217.0.2:53480->10.217.0.196:3000: read: connection reset by peer" Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.484277 4816 generic.go:334] "Generic (PLEG): container finished" podID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerID="4bb52d950b8046a9e69a5960c76c6af1619b06ddc7d4e281fdf48cfbcb9eceb0" exitCode=143 Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.484366 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc","Type":"ContainerDied","Data":"4bb52d950b8046a9e69a5960c76c6af1619b06ddc7d4e281fdf48cfbcb9eceb0"} Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.488010 4816 generic.go:334] "Generic (PLEG): container finished" podID="48c0747c-1464-4d37-9b5b-3583ae413353" containerID="d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12" exitCode=0 Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.488045 4816 generic.go:334] "Generic (PLEG): container finished" podID="48c0747c-1464-4d37-9b5b-3583ae413353" containerID="2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1" exitCode=2 Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.488851 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerDied","Data":"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12"} Feb 16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.488891 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerDied","Data":"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1"} Feb 
16 13:25:24 crc kubenswrapper[4816]: I0216 13:25:24.938058 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106424 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-scripts\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106503 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-ceilometer-tls-certs\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106626 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-run-httpd\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106752 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-sg-core-conf-yaml\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106795 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-combined-ca-bundle\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106833 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zn2q5\" (UniqueName: \"kubernetes.io/projected/48c0747c-1464-4d37-9b5b-3583ae413353-kube-api-access-zn2q5\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106868 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-config-data\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.106903 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-log-httpd\") pod \"48c0747c-1464-4d37-9b5b-3583ae413353\" (UID: \"48c0747c-1464-4d37-9b5b-3583ae413353\") " Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.107617 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.108147 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.128898 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-scripts" (OuterVolumeSpecName: "scripts") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.128964 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48c0747c-1464-4d37-9b5b-3583ae413353-kube-api-access-zn2q5" (OuterVolumeSpecName: "kube-api-access-zn2q5") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "kube-api-access-zn2q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.142278 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.170981 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.192404 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.209013 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.209060 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.209076 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.209088 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zn2q5\" (UniqueName: \"kubernetes.io/projected/48c0747c-1464-4d37-9b5b-3583ae413353-kube-api-access-zn2q5\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.209098 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48c0747c-1464-4d37-9b5b-3583ae413353-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.209108 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.209117 4816 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.228879 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-config-data" (OuterVolumeSpecName: "config-data") pod "48c0747c-1464-4d37-9b5b-3583ae413353" (UID: "48c0747c-1464-4d37-9b5b-3583ae413353"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.310639 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c0747c-1464-4d37-9b5b-3583ae413353-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.499599 4816 generic.go:334] "Generic (PLEG): container finished" podID="48c0747c-1464-4d37-9b5b-3583ae413353" containerID="fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23" exitCode=0 Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.499889 4816 generic.go:334] "Generic (PLEG): container finished" podID="48c0747c-1464-4d37-9b5b-3583ae413353" containerID="4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2" exitCode=0 Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.499762 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.499676 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerDied","Data":"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23"} Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.500751 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerDied","Data":"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2"} Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.500766 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48c0747c-1464-4d37-9b5b-3583ae413353","Type":"ContainerDied","Data":"6c30b0aabc6bde73faff064849bbc44391acced1f6ed463379605b6edd1429cd"} Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.500782 4816 scope.go:117] "RemoveContainer" containerID="d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.531781 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.555892 4816 scope.go:117] "RemoveContainer" containerID="2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.556268 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.578644 4816 scope.go:117] "RemoveContainer" containerID="fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581151 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.581565 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-notification-agent" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581589 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-notification-agent" Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.581611 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="proxy-httpd" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581620 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="proxy-httpd" Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.581668 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="sg-core" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581677 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="sg-core" Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.581693 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-central-agent" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581699 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-central-agent" Feb 16 13:25:25 crc 
kubenswrapper[4816]: I0216 13:25:25.581877 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-notification-agent" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581896 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="proxy-httpd" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581907 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="ceilometer-central-agent" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.581918 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" containerName="sg-core" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.584111 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.586692 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.586879 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.590905 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.595301 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.603598 4816 scope.go:117] "RemoveContainer" containerID="4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.622532 4816 scope.go:117] "RemoveContainer" containerID="d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12" Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.622989 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12\": container with ID starting with d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12 not found: ID does not exist" containerID="d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.623026 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12"} err="failed to get container status \"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12\": rpc error: code = NotFound desc = could not find container \"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12\": container with ID starting with d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.623052 4816 scope.go:117] "RemoveContainer" containerID="2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1" Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.623425 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1\": container with ID starting with 
2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1 not found: ID does not exist" containerID="2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.623477 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1"} err="failed to get container status \"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1\": rpc error: code = NotFound desc = could not find container \"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1\": container with ID starting with 2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.623507 4816 scope.go:117] "RemoveContainer" containerID="fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23" Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.623825 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23\": container with ID starting with fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23 not found: ID does not exist" containerID="fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.623850 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23"} err="failed to get container status \"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23\": rpc error: code = NotFound desc = could not find container \"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23\": container with ID starting with fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.623864 4816 scope.go:117] "RemoveContainer" containerID="4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2" Feb 16 13:25:25 crc kubenswrapper[4816]: E0216 13:25:25.624153 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2\": container with ID starting with 4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2 not found: ID does not exist" containerID="4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.624181 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2"} err="failed to get container status \"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2\": rpc error: code = NotFound desc = could not find container \"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2\": container with ID starting with 4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.624197 4816 scope.go:117] "RemoveContainer" containerID="d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.624473 4816 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12"} err="failed to get container status \"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12\": rpc error: code = NotFound desc = could not find container \"d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12\": container with ID starting with d3787c7d42332b2e59e9ffb8ba62fee49a288399b02e55a24db8c556e5ebab12 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.624494 4816 scope.go:117] "RemoveContainer" containerID="2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.624757 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1"} err="failed to get container status \"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1\": rpc error: code = NotFound desc = could not find container \"2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1\": container with ID starting with 2990f797c19fc6f17f5ccba64a1ddeeaa2809459308f3d94069c05b820a4acb1 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.624784 4816 scope.go:117] "RemoveContainer" containerID="fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.625064 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23"} err="failed to get container status \"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23\": rpc error: code = NotFound desc = could not find container \"fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23\": container with ID starting with fa4632c278ed89e5ab2ce2fea45075006a8e47ed662eabbb284e069c8e4a4f23 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.625092 4816 scope.go:117] "RemoveContainer" containerID="4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.625355 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2"} err="failed to get container status \"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2\": rpc error: code = NotFound desc = could not find container \"4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2\": container with ID starting with 4bb065680d7e529c20726846ce0723257d1803fc1c711890f306adebd215dfd2 not found: ID does not exist" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.629858 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-scripts\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.629900 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-log-httpd\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 
13:25:25.630021 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.630053 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.630078 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.630181 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-config-data\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.630262 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr4wf\" (UniqueName: \"kubernetes.io/projected/7bcb172e-dbe4-44ef-a00f-97e55968c160-kube-api-access-hr4wf\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.630468 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-run-httpd\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.732715 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-run-httpd\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.732800 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-scripts\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.732824 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-log-httpd\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.732901 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.732927 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.732946 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.733000 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-config-data\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.733023 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr4wf\" (UniqueName: \"kubernetes.io/projected/7bcb172e-dbe4-44ef-a00f-97e55968c160-kube-api-access-hr4wf\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.733974 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-run-httpd\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.735219 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-log-httpd\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.738396 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.738420 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-scripts\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.738901 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.739248 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-config-data\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " 
pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.741214 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.749627 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr4wf\" (UniqueName: \"kubernetes.io/projected/7bcb172e-dbe4-44ef-a00f-97e55968c160-kube-api-access-hr4wf\") pod \"ceilometer-0\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " pod="openstack/ceilometer-0" Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.775344 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:25 crc kubenswrapper[4816]: I0216 13:25:25.776005 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:26 crc kubenswrapper[4816]: W0216 13:25:26.247230 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bcb172e_dbe4_44ef_a00f_97e55968c160.slice/crio-ebc23c7c2f0deb6ffbe62f3f0f1bd4aefe5883f37038fdf4fce53adcd4c786dd WatchSource:0}: Error finding container ebc23c7c2f0deb6ffbe62f3f0f1bd4aefe5883f37038fdf4fce53adcd4c786dd: Status 404 returned error can't find the container with id ebc23c7c2f0deb6ffbe62f3f0f1bd4aefe5883f37038fdf4fce53adcd4c786dd Feb 16 13:25:26 crc kubenswrapper[4816]: I0216 13:25:26.257044 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:26 crc kubenswrapper[4816]: I0216 13:25:26.512290 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerStarted","Data":"ebc23c7c2f0deb6ffbe62f3f0f1bd4aefe5883f37038fdf4fce53adcd4c786dd"} Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.426454 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48c0747c-1464-4d37-9b5b-3583ae413353" path="/var/lib/kubelet/pods/48c0747c-1464-4d37-9b5b-3583ae413353/volumes" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.522935 4816 generic.go:334] "Generic (PLEG): container finished" podID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerID="2421e0c16ddc2a0653671ff82c882fdcc48898092dd506013711b5886e1eb877" exitCode=0 Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.523029 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc","Type":"ContainerDied","Data":"2421e0c16ddc2a0653671ff82c882fdcc48898092dd506013711b5886e1eb877"} Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.523108 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc","Type":"ContainerDied","Data":"d45129601f57fa96dd03fedb977abd763cb6775a4116767eee15f40aed3b4ff6"} Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.523123 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d45129601f57fa96dd03fedb977abd763cb6775a4116767eee15f40aed3b4ff6" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.525255 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerStarted","Data":"6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7"} Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.600002 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.713542 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-config-data\") pod \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.714476 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dncqx\" (UniqueName: \"kubernetes.io/projected/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-kube-api-access-dncqx\") pod \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.714528 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-logs\") pod \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.714888 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-combined-ca-bundle\") pod \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\" (UID: \"aa01bd5c-1bc2-423a-bfa3-a0411bea1afc\") " Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.715880 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-logs" (OuterVolumeSpecName: "logs") pod "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" (UID: "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.720128 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-kube-api-access-dncqx" (OuterVolumeSpecName: "kube-api-access-dncqx") pod "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" (UID: "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc"). InnerVolumeSpecName "kube-api-access-dncqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.743531 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" (UID: "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.755709 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-config-data" (OuterVolumeSpecName: "config-data") pod "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" (UID: "aa01bd5c-1bc2-423a-bfa3-a0411bea1afc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.847168 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dncqx\" (UniqueName: \"kubernetes.io/projected/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-kube-api-access-dncqx\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.847204 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.847217 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:27 crc kubenswrapper[4816]: I0216 13:25:27.847228 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.004036 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.048861 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.534352 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerStarted","Data":"8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5"} Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.535417 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.563529 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.578723 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.594229 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.609790 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:28 crc kubenswrapper[4816]: E0216 13:25:28.610504 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-api" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.610538 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-api" Feb 16 13:25:28 crc kubenswrapper[4816]: E0216 13:25:28.610572 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-log" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.610585 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-log" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.610947 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-api" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.610979 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" containerName="nova-api-log" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.612721 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.615296 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.615947 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.616264 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.653553 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.768347 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-config-data\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.768400 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.768627 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.768838 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-public-tls-certs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.768944 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6wnv\" (UniqueName: \"kubernetes.io/projected/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-kube-api-access-t6wnv\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.768992 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-logs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.785049 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-wmrm4"] Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.786221 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.787852 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.787977 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.796792 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-wmrm4"] Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.870814 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-public-tls-certs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.870893 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-logs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.870918 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6wnv\" (UniqueName: \"kubernetes.io/projected/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-kube-api-access-t6wnv\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.871000 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-config-data\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.871034 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.871090 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.872954 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-logs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.875942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-config-data\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.876233 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.876758 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.876939 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-public-tls-certs\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.889331 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6wnv\" (UniqueName: \"kubernetes.io/projected/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-kube-api-access-t6wnv\") pod \"nova-api-0\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") " pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.930729 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.973276 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-scripts\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.973361 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v94jh\" (UniqueName: \"kubernetes.io/projected/138bffa0-61c1-4fee-951a-1f54bc811535-kube-api-access-v94jh\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.973415 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-config-data\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:28 crc kubenswrapper[4816]: I0216 13:25:28.973434 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.075321 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-scripts\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.075684 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-v94jh\" (UniqueName: \"kubernetes.io/projected/138bffa0-61c1-4fee-951a-1f54bc811535-kube-api-access-v94jh\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.075742 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-config-data\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.075767 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.081105 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.087847 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-config-data\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.096198 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v94jh\" (UniqueName: \"kubernetes.io/projected/138bffa0-61c1-4fee-951a-1f54bc811535-kube-api-access-v94jh\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.096199 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-scripts\") pod \"nova-cell1-cell-mapping-wmrm4\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.105107 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.398210 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:29 crc kubenswrapper[4816]: I0216 13:25:29.433685 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa01bd5c-1bc2-423a-bfa3-a0411bea1afc" path="/var/lib/kubelet/pods/aa01bd5c-1bc2-423a-bfa3-a0411bea1afc/volumes" Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:29.543573 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerStarted","Data":"f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db"} Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:29.544747 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d955b1f-cf64-49ed-9d42-2ebf086c22d8","Type":"ContainerStarted","Data":"2693b93b98912d21e4eb4cfc17dada1033cbb94be293a177a01d1bf625434904"} Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:29.625564 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-wmrm4"] Feb 16 13:25:30 crc kubenswrapper[4816]: W0216 13:25:29.627675 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod138bffa0_61c1_4fee_951a_1f54bc811535.slice/crio-a908c9883cbad1c42ee7739d6db94dd78ef8ecc54e4a1fbb3d26b02f4e2fc55a WatchSource:0}: Error finding container a908c9883cbad1c42ee7739d6db94dd78ef8ecc54e4a1fbb3d26b02f4e2fc55a: Status 404 returned error can't find the container with id a908c9883cbad1c42ee7739d6db94dd78ef8ecc54e4a1fbb3d26b02f4e2fc55a Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:30.566333 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wmrm4" event={"ID":"138bffa0-61c1-4fee-951a-1f54bc811535","Type":"ContainerStarted","Data":"b8de906ff2b39c6986fa45580797f26d39d0277c32281ccb7e9fc35db0d673c3"} Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:30.566731 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wmrm4" event={"ID":"138bffa0-61c1-4fee-951a-1f54bc811535","Type":"ContainerStarted","Data":"a908c9883cbad1c42ee7739d6db94dd78ef8ecc54e4a1fbb3d26b02f4e2fc55a"} Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:30.572536 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d955b1f-cf64-49ed-9d42-2ebf086c22d8","Type":"ContainerStarted","Data":"e0fbdd58069c19d3ffac844c905f09ddbf609c85cfea329eff002c7de1af5f15"} Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:30.572580 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d955b1f-cf64-49ed-9d42-2ebf086c22d8","Type":"ContainerStarted","Data":"d7feb4c53dc53125c062e8067f6e5ec9873ac06dd66f9a44a1c24cfcde3d1d65"} Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:30.599383 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-wmrm4" podStartSLOduration=2.599357319 podStartE2EDuration="2.599357319s" podCreationTimestamp="2026-02-16 13:25:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:30.584263987 +0000 UTC m=+1329.910977735" watchObservedRunningTime="2026-02-16 13:25:30.599357319 +0000 UTC 
m=+1329.926071087" Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:30.619092 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.619073098 podStartE2EDuration="2.619073098s" podCreationTimestamp="2026-02-16 13:25:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:30.599718029 +0000 UTC m=+1329.926431757" watchObservedRunningTime="2026-02-16 13:25:30.619073098 +0000 UTC m=+1329.945786826" Feb 16 13:25:30 crc kubenswrapper[4816]: I0216 13:25:30.986988 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.076140 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-khvwc"] Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.076361 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" podUID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerName="dnsmasq-dns" containerID="cri-o://5c0de90e8dcfab5d056886954e128878aef7cca94e4c5037f744e9127ed2ae39" gracePeriod=10 Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.590706 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerStarted","Data":"1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4"} Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.592424 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-central-agent" containerID="cri-o://6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7" gracePeriod=30 Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.592814 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.593139 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="proxy-httpd" containerID="cri-o://1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4" gracePeriod=30 Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.593191 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="sg-core" containerID="cri-o://f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db" gracePeriod=30 Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.593236 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-notification-agent" containerID="cri-o://8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5" gracePeriod=30 Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.596009 4816 generic.go:334] "Generic (PLEG): container finished" podID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerID="5c0de90e8dcfab5d056886954e128878aef7cca94e4c5037f744e9127ed2ae39" exitCode=0 Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.596918 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" 
event={"ID":"d5392b80-e3a0-4695-a019-6eb5f23ca01c","Type":"ContainerDied","Data":"5c0de90e8dcfab5d056886954e128878aef7cca94e4c5037f744e9127ed2ae39"} Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.596946 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" event={"ID":"d5392b80-e3a0-4695-a019-6eb5f23ca01c","Type":"ContainerDied","Data":"b3770c757a546e04ae303134d3bb9c2f8aa0dcb54f2c5cebbb11e49106f3b69d"} Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.596959 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3770c757a546e04ae303134d3bb9c2f8aa0dcb54f2c5cebbb11e49106f3b69d" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.639634 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.640646 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.257205236 podStartE2EDuration="6.640632599s" podCreationTimestamp="2026-02-16 13:25:25 +0000 UTC" firstStartedPulling="2026-02-16 13:25:26.249298168 +0000 UTC m=+1325.576011896" lastFinishedPulling="2026-02-16 13:25:30.632725521 +0000 UTC m=+1329.959439259" observedRunningTime="2026-02-16 13:25:31.624679304 +0000 UTC m=+1330.951393032" watchObservedRunningTime="2026-02-16 13:25:31.640632599 +0000 UTC m=+1330.967346327" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.740070 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-config\") pod \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.740256 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-svc\") pod \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.740416 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-sb\") pod \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.740498 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-nb\") pod \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.740575 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvqdt\" (UniqueName: \"kubernetes.io/projected/d5392b80-e3a0-4695-a019-6eb5f23ca01c-kube-api-access-gvqdt\") pod \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\" (UID: \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.740668 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-swift-storage-0\") pod \"d5392b80-e3a0-4695-a019-6eb5f23ca01c\" (UID: 
\"d5392b80-e3a0-4695-a019-6eb5f23ca01c\") " Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.749365 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5392b80-e3a0-4695-a019-6eb5f23ca01c-kube-api-access-gvqdt" (OuterVolumeSpecName: "kube-api-access-gvqdt") pod "d5392b80-e3a0-4695-a019-6eb5f23ca01c" (UID: "d5392b80-e3a0-4695-a019-6eb5f23ca01c"). InnerVolumeSpecName "kube-api-access-gvqdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.801371 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d5392b80-e3a0-4695-a019-6eb5f23ca01c" (UID: "d5392b80-e3a0-4695-a019-6eb5f23ca01c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.802210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d5392b80-e3a0-4695-a019-6eb5f23ca01c" (UID: "d5392b80-e3a0-4695-a019-6eb5f23ca01c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.809965 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d5392b80-e3a0-4695-a019-6eb5f23ca01c" (UID: "d5392b80-e3a0-4695-a019-6eb5f23ca01c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.820729 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d5392b80-e3a0-4695-a019-6eb5f23ca01c" (UID: "d5392b80-e3a0-4695-a019-6eb5f23ca01c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.824625 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-config" (OuterVolumeSpecName: "config") pod "d5392b80-e3a0-4695-a019-6eb5f23ca01c" (UID: "d5392b80-e3a0-4695-a019-6eb5f23ca01c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.843181 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.843217 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.843227 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvqdt\" (UniqueName: \"kubernetes.io/projected/d5392b80-e3a0-4695-a019-6eb5f23ca01c-kube-api-access-gvqdt\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.843240 4816 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.843250 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:31 crc kubenswrapper[4816]: I0216 13:25:31.843258 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5392b80-e3a0-4695-a019-6eb5f23ca01c-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.607802 4816 generic.go:334] "Generic (PLEG): container finished" podID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerID="1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4" exitCode=0 Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.607844 4816 generic.go:334] "Generic (PLEG): container finished" podID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerID="f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db" exitCode=2 Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.607854 4816 generic.go:334] "Generic (PLEG): container finished" podID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerID="8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5" exitCode=0 Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.607926 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-khvwc" Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.608818 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerDied","Data":"1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4"} Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.608889 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerDied","Data":"f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db"} Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.608908 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerDied","Data":"8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5"} Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.647581 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-khvwc"] Feb 16 13:25:32 crc kubenswrapper[4816]: I0216 13:25:32.660851 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-khvwc"] Feb 16 13:25:33 crc kubenswrapper[4816]: I0216 13:25:33.417606 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" path="/var/lib/kubelet/pods/d5392b80-e3a0-4695-a019-6eb5f23ca01c/volumes" Feb 16 13:25:33 crc kubenswrapper[4816]: E0216 13:25:33.714543 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bcb172e_dbe4_44ef_a00f_97e55968c160.slice/crio-conmon-6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bcb172e_dbe4_44ef_a00f_97e55968c160.slice/crio-6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7.scope\": RecentStats: unable to find data in memory cache]" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.076977 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.199624 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-config-data\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.199857 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-combined-ca-bundle\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.199975 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-log-httpd\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200101 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-ceilometer-tls-certs\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200138 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr4wf\" (UniqueName: \"kubernetes.io/projected/7bcb172e-dbe4-44ef-a00f-97e55968c160-kube-api-access-hr4wf\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200189 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-scripts\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200211 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-run-httpd\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200265 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-sg-core-conf-yaml\") pod \"7bcb172e-dbe4-44ef-a00f-97e55968c160\" (UID: \"7bcb172e-dbe4-44ef-a00f-97e55968c160\") " Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200785 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200855 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.200937 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.206316 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bcb172e-dbe4-44ef-a00f-97e55968c160-kube-api-access-hr4wf" (OuterVolumeSpecName: "kube-api-access-hr4wf") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "kube-api-access-hr4wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.207693 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-scripts" (OuterVolumeSpecName: "scripts") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.235859 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.249886 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.285270 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.298920 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-config-data" (OuterVolumeSpecName: "config-data") pod "7bcb172e-dbe4-44ef-a00f-97e55968c160" (UID: "7bcb172e-dbe4-44ef-a00f-97e55968c160"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.302710 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bcb172e-dbe4-44ef-a00f-97e55968c160-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.302794 4816 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.302823 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr4wf\" (UniqueName: \"kubernetes.io/projected/7bcb172e-dbe4-44ef-a00f-97e55968c160-kube-api-access-hr4wf\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.302842 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.302861 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.302879 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.302896 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bcb172e-dbe4-44ef-a00f-97e55968c160-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.629505 4816 generic.go:334] "Generic (PLEG): container finished" podID="138bffa0-61c1-4fee-951a-1f54bc811535" containerID="b8de906ff2b39c6986fa45580797f26d39d0277c32281ccb7e9fc35db0d673c3" exitCode=0 Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.629600 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wmrm4" event={"ID":"138bffa0-61c1-4fee-951a-1f54bc811535","Type":"ContainerDied","Data":"b8de906ff2b39c6986fa45580797f26d39d0277c32281ccb7e9fc35db0d673c3"} Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.634426 4816 generic.go:334] "Generic (PLEG): container finished" podID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerID="6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7" exitCode=0 Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.634494 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerDied","Data":"6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7"} Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.634533 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bcb172e-dbe4-44ef-a00f-97e55968c160","Type":"ContainerDied","Data":"ebc23c7c2f0deb6ffbe62f3f0f1bd4aefe5883f37038fdf4fce53adcd4c786dd"} Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.634561 4816 scope.go:117] "RemoveContainer" containerID="1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4" Feb 16 13:25:34 crc 
kubenswrapper[4816]: I0216 13:25:34.634808 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.680129 4816 scope.go:117] "RemoveContainer" containerID="f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.706860 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.719976 4816 scope.go:117] "RemoveContainer" containerID="8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.726167 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.735873 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.736370 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-notification-agent" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736393 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-notification-agent" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.736408 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="sg-core" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736417 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="sg-core" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.736430 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerName="dnsmasq-dns" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736438 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerName="dnsmasq-dns" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.736472 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="proxy-httpd" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736481 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="proxy-httpd" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.736499 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerName="init" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736508 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerName="init" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.736525 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-central-agent" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736533 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-central-agent" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736847 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-central-agent" Feb 16 13:25:34 
crc kubenswrapper[4816]: I0216 13:25:34.736880 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="proxy-httpd" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736902 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5392b80-e3a0-4695-a019-6eb5f23ca01c" containerName="dnsmasq-dns" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736925 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="ceilometer-notification-agent" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.736936 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" containerName="sg-core" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.740144 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.746565 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.746703 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.746857 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.762387 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.781333 4816 scope.go:117] "RemoveContainer" containerID="6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.801039 4816 scope.go:117] "RemoveContainer" containerID="1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.801394 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4\": container with ID starting with 1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4 not found: ID does not exist" containerID="1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.801451 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4"} err="failed to get container status \"1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4\": rpc error: code = NotFound desc = could not find container \"1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4\": container with ID starting with 1f8709a3841d420a30a674a669e96efefb35afdae919046340749ea7ac93f8d4 not found: ID does not exist" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.801480 4816 scope.go:117] "RemoveContainer" containerID="f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.802084 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db\": container with ID starting with 
f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db not found: ID does not exist" containerID="f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.802135 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db"} err="failed to get container status \"f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db\": rpc error: code = NotFound desc = could not find container \"f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db\": container with ID starting with f7e6740c53faf37ffe3945f212662cf8a9469f022e153629032be31de7ba96db not found: ID does not exist" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.802429 4816 scope.go:117] "RemoveContainer" containerID="8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.802835 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5\": container with ID starting with 8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5 not found: ID does not exist" containerID="8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.802873 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5"} err="failed to get container status \"8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5\": rpc error: code = NotFound desc = could not find container \"8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5\": container with ID starting with 8010a5318b76584934a4132405846d927eed648020a4ef68430acbef2761c7a5 not found: ID does not exist" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.802897 4816 scope.go:117] "RemoveContainer" containerID="6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7" Feb 16 13:25:34 crc kubenswrapper[4816]: E0216 13:25:34.803156 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7\": container with ID starting with 6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7 not found: ID does not exist" containerID="6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.803183 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7"} err="failed to get container status \"6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7\": rpc error: code = NotFound desc = could not find container \"6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7\": container with ID starting with 6f479322b00627e5253f82687f03ddf44791a89539f6cf38fe60345cc98f08a7 not found: ID does not exist" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920264 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-log-httpd\") pod \"ceilometer-0\" (UID: 
\"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920326 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920361 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920585 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-config-data\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920643 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltqzd\" (UniqueName: \"kubernetes.io/projected/9da68b25-a924-4d2e-82a3-c635014f32e9-kube-api-access-ltqzd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920690 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-scripts\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920738 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-run-httpd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:34 crc kubenswrapper[4816]: I0216 13:25:34.920809 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.022906 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-config-data\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.022968 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltqzd\" (UniqueName: \"kubernetes.io/projected/9da68b25-a924-4d2e-82a3-c635014f32e9-kube-api-access-ltqzd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.022994 4816 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-scripts\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.023030 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-run-httpd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.023074 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.023165 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-log-httpd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.023189 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.023219 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.023898 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-log-httpd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.023980 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-run-httpd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.042430 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.043124 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-scripts\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.044019 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-config-data\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.045597 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.055915 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.059700 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltqzd\" (UniqueName: \"kubernetes.io/projected/9da68b25-a924-4d2e-82a3-c635014f32e9-kube-api-access-ltqzd\") pod \"ceilometer-0\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.096014 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.409799 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bcb172e-dbe4-44ef-a00f-97e55968c160" path="/var/lib/kubelet/pods/7bcb172e-dbe4-44ef-a00f-97e55968c160/volumes" Feb 16 13:25:35 crc kubenswrapper[4816]: W0216 13:25:35.574735 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9da68b25_a924_4d2e_82a3_c635014f32e9.slice/crio-e615eee864ce675143a21ef09bc04749ff7242fd1df9e2795e1e1b28f6b63bda WatchSource:0}: Error finding container e615eee864ce675143a21ef09bc04749ff7242fd1df9e2795e1e1b28f6b63bda: Status 404 returned error can't find the container with id e615eee864ce675143a21ef09bc04749ff7242fd1df9e2795e1e1b28f6b63bda Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.580223 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.646836 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerStarted","Data":"e615eee864ce675143a21ef09bc04749ff7242fd1df9e2795e1e1b28f6b63bda"} Feb 16 13:25:35 crc kubenswrapper[4816]: I0216 13:25:35.960105 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.147049 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-config-data\") pod \"138bffa0-61c1-4fee-951a-1f54bc811535\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.147097 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-scripts\") pod \"138bffa0-61c1-4fee-951a-1f54bc811535\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.147140 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-combined-ca-bundle\") pod \"138bffa0-61c1-4fee-951a-1f54bc811535\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.147205 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v94jh\" (UniqueName: \"kubernetes.io/projected/138bffa0-61c1-4fee-951a-1f54bc811535-kube-api-access-v94jh\") pod \"138bffa0-61c1-4fee-951a-1f54bc811535\" (UID: \"138bffa0-61c1-4fee-951a-1f54bc811535\") " Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.151212 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-scripts" (OuterVolumeSpecName: "scripts") pod "138bffa0-61c1-4fee-951a-1f54bc811535" (UID: "138bffa0-61c1-4fee-951a-1f54bc811535"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.152109 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/138bffa0-61c1-4fee-951a-1f54bc811535-kube-api-access-v94jh" (OuterVolumeSpecName: "kube-api-access-v94jh") pod "138bffa0-61c1-4fee-951a-1f54bc811535" (UID: "138bffa0-61c1-4fee-951a-1f54bc811535"). InnerVolumeSpecName "kube-api-access-v94jh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.174935 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-config-data" (OuterVolumeSpecName: "config-data") pod "138bffa0-61c1-4fee-951a-1f54bc811535" (UID: "138bffa0-61c1-4fee-951a-1f54bc811535"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.175809 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "138bffa0-61c1-4fee-951a-1f54bc811535" (UID: "138bffa0-61c1-4fee-951a-1f54bc811535"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.249094 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.249136 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.249145 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138bffa0-61c1-4fee-951a-1f54bc811535-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.249155 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v94jh\" (UniqueName: \"kubernetes.io/projected/138bffa0-61c1-4fee-951a-1f54bc811535-kube-api-access-v94jh\") on node \"crc\" DevicePath \"\"" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.658904 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wmrm4" event={"ID":"138bffa0-61c1-4fee-951a-1f54bc811535","Type":"ContainerDied","Data":"a908c9883cbad1c42ee7739d6db94dd78ef8ecc54e4a1fbb3d26b02f4e2fc55a"} Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.659281 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a908c9883cbad1c42ee7739d6db94dd78ef8ecc54e4a1fbb3d26b02f4e2fc55a" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.658922 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wmrm4" Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.660445 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerStarted","Data":"f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c"} Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.753840 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.754134 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-log" containerID="cri-o://d7feb4c53dc53125c062e8067f6e5ec9873ac06dd66f9a44a1c24cfcde3d1d65" gracePeriod=30 Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.754232 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-api" containerID="cri-o://e0fbdd58069c19d3ffac844c905f09ddbf609c85cfea329eff002c7de1af5f15" gracePeriod=30 Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.767284 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.767962 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" containerName="nova-scheduler-scheduler" containerID="cri-o://83122580674bda87f2a57af7d86fc48d318d72637c6ef769957dc9804b92ab66" gracePeriod=30 Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.838244 4816 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.838570 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-metadata" containerID="cri-o://4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee" gracePeriod=30 Feb 16 13:25:36 crc kubenswrapper[4816]: I0216 13:25:36.838908 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-log" containerID="cri-o://e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7" gracePeriod=30 Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.678630 4816 generic.go:334] "Generic (PLEG): container finished" podID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerID="e0fbdd58069c19d3ffac844c905f09ddbf609c85cfea329eff002c7de1af5f15" exitCode=0 Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.678946 4816 generic.go:334] "Generic (PLEG): container finished" podID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerID="d7feb4c53dc53125c062e8067f6e5ec9873ac06dd66f9a44a1c24cfcde3d1d65" exitCode=143 Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.678694 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d955b1f-cf64-49ed-9d42-2ebf086c22d8","Type":"ContainerDied","Data":"e0fbdd58069c19d3ffac844c905f09ddbf609c85cfea329eff002c7de1af5f15"} Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.679128 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d955b1f-cf64-49ed-9d42-2ebf086c22d8","Type":"ContainerDied","Data":"d7feb4c53dc53125c062e8067f6e5ec9873ac06dd66f9a44a1c24cfcde3d1d65"} Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.692994 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerStarted","Data":"f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08"} Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.698442 4816 generic.go:334] "Generic (PLEG): container finished" podID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerID="e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7" exitCode=143 Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.698487 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f5330f89-cc28-46cd-9119-a2fb00da6220","Type":"ContainerDied","Data":"e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7"} Feb 16 13:25:37 crc kubenswrapper[4816]: I0216 13:25:37.969754 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.084882 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-config-data\") pod \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") "
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.084943 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-logs\") pod \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") "
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.084981 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-public-tls-certs\") pod \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") "
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.085070 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-combined-ca-bundle\") pod \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") "
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.085207 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6wnv\" (UniqueName: \"kubernetes.io/projected/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-kube-api-access-t6wnv\") pod \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") "
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.085268 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-internal-tls-certs\") pod \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\" (UID: \"4d955b1f-cf64-49ed-9d42-2ebf086c22d8\") "
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.087096 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-logs" (OuterVolumeSpecName: "logs") pod "4d955b1f-cf64-49ed-9d42-2ebf086c22d8" (UID: "4d955b1f-cf64-49ed-9d42-2ebf086c22d8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.110814 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-kube-api-access-t6wnv" (OuterVolumeSpecName: "kube-api-access-t6wnv") pod "4d955b1f-cf64-49ed-9d42-2ebf086c22d8" (UID: "4d955b1f-cf64-49ed-9d42-2ebf086c22d8"). InnerVolumeSpecName "kube-api-access-t6wnv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.116631 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-config-data" (OuterVolumeSpecName: "config-data") pod "4d955b1f-cf64-49ed-9d42-2ebf086c22d8" (UID: "4d955b1f-cf64-49ed-9d42-2ebf086c22d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.118135 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d955b1f-cf64-49ed-9d42-2ebf086c22d8" (UID: "4d955b1f-cf64-49ed-9d42-2ebf086c22d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.152127 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4d955b1f-cf64-49ed-9d42-2ebf086c22d8" (UID: "4d955b1f-cf64-49ed-9d42-2ebf086c22d8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.154382 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4d955b1f-cf64-49ed-9d42-2ebf086c22d8" (UID: "4d955b1f-cf64-49ed-9d42-2ebf086c22d8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.187882 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.187926 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6wnv\" (UniqueName: \"kubernetes.io/projected/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-kube-api-access-t6wnv\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.187958 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.187969 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.187980 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-logs\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.187988 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d955b1f-cf64-49ed-9d42-2ebf086c22d8-public-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.709012 4816 generic.go:334] "Generic (PLEG): container finished" podID="539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" containerID="83122580674bda87f2a57af7d86fc48d318d72637c6ef769957dc9804b92ab66" exitCode=0
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.709127 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f","Type":"ContainerDied","Data":"83122580674bda87f2a57af7d86fc48d318d72637c6ef769957dc9804b92ab66"}
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.713079 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d955b1f-cf64-49ed-9d42-2ebf086c22d8","Type":"ContainerDied","Data":"2693b93b98912d21e4eb4cfc17dada1033cbb94be293a177a01d1bf625434904"}
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.713140 4816 scope.go:117] "RemoveContainer" containerID="e0fbdd58069c19d3ffac844c905f09ddbf609c85cfea329eff002c7de1af5f15"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.713102 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.718133 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerStarted","Data":"d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c"}
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.752750 4816 scope.go:117] "RemoveContainer" containerID="d7feb4c53dc53125c062e8067f6e5ec9873ac06dd66f9a44a1c24cfcde3d1d65"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.761092 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.800854 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.821038 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.840752 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Feb 16 13:25:38 crc kubenswrapper[4816]: E0216 13:25:38.843188 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-log"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.843255 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-log"
Feb 16 13:25:38 crc kubenswrapper[4816]: E0216 13:25:38.843358 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" containerName="nova-scheduler-scheduler"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.843381 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" containerName="nova-scheduler-scheduler"
Feb 16 13:25:38 crc kubenswrapper[4816]: E0216 13:25:38.843430 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="138bffa0-61c1-4fee-951a-1f54bc811535" containerName="nova-manage"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.843440 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="138bffa0-61c1-4fee-951a-1f54bc811535" containerName="nova-manage"
Feb 16 13:25:38 crc kubenswrapper[4816]: E0216 13:25:38.843469 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-api"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.843478 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-api"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.844365 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-log"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.844408 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="138bffa0-61c1-4fee-951a-1f54bc811535" containerName="nova-manage"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.844451 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" containerName="nova-scheduler-scheduler"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.844471 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" containerName="nova-api-api"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.847388 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.857748 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.857915 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.858014 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 16 13:25:38 crc kubenswrapper[4816]: I0216 13:25:38.877168 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.003425 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hpdm\" (UniqueName: \"kubernetes.io/projected/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-kube-api-access-8hpdm\") pod \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") "
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.003506 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-combined-ca-bundle\") pod \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") "
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.003558 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-config-data\") pod \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\" (UID: \"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f\") "
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.004809 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27fde082-22f6-49b6-9750-796875a2fe49-logs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.004865 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-config-data\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.004935 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.005818 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-public-tls-certs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.006534 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp5v7\" (UniqueName: \"kubernetes.io/projected/27fde082-22f6-49b6-9750-796875a2fe49-kube-api-access-dp5v7\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.006683 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-internal-tls-certs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.009009 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-kube-api-access-8hpdm" (OuterVolumeSpecName: "kube-api-access-8hpdm") pod "539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" (UID: "539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f"). InnerVolumeSpecName "kube-api-access-8hpdm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.047736 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-config-data" (OuterVolumeSpecName: "config-data") pod "539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" (UID: "539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.047755 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" (UID: "539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107670 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-public-tls-certs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107752 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp5v7\" (UniqueName: \"kubernetes.io/projected/27fde082-22f6-49b6-9750-796875a2fe49-kube-api-access-dp5v7\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107802 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-internal-tls-certs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27fde082-22f6-49b6-9750-796875a2fe49-logs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107850 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-config-data\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107888 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107949 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hpdm\" (UniqueName: \"kubernetes.io/projected/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-kube-api-access-8hpdm\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107961 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.107971 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.109481 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27fde082-22f6-49b6-9750-796875a2fe49-logs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.113556 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-public-tls-certs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.115605 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.117139 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-internal-tls-certs\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.118083 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-config-data\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.140231 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp5v7\" (UniqueName: \"kubernetes.io/projected/27fde082-22f6-49b6-9750-796875a2fe49-kube-api-access-dp5v7\") pod \"nova-api-0\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.224014 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.423717 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d955b1f-cf64-49ed-9d42-2ebf086c22d8" path="/var/lib/kubelet/pods/4d955b1f-cf64-49ed-9d42-2ebf086c22d8/volumes"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.738390 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerStarted","Data":"daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552"}
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.739726 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.741689 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f","Type":"ContainerDied","Data":"d9e5a902fc9aaf1fd6ad4a66d09aa352b431c3bfd239f29b92dea43d20055448"}
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.741744 4816 scope.go:117] "RemoveContainer" containerID="83122580674bda87f2a57af7d86fc48d318d72637c6ef769957dc9804b92ab66"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.741890 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: W0216 13:25:39.747089 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27fde082_22f6_49b6_9750_796875a2fe49.slice/crio-8554fb823081f64a56e1bbc1f9c4c51204feaa39a0fe5f9921b3e6681366287e WatchSource:0}: Error finding container 8554fb823081f64a56e1bbc1f9c4c51204feaa39a0fe5f9921b3e6681366287e: Status 404 returned error can't find the container with id 8554fb823081f64a56e1bbc1f9c4c51204feaa39a0fe5f9921b3e6681366287e
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.753396 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.771481 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.133543022 podStartE2EDuration="5.771460783s" podCreationTimestamp="2026-02-16 13:25:34 +0000 UTC" firstStartedPulling="2026-02-16 13:25:35.578013109 +0000 UTC m=+1334.904726837" lastFinishedPulling="2026-02-16 13:25:39.21593087 +0000 UTC m=+1338.542644598" observedRunningTime="2026-02-16 13:25:39.76552211 +0000 UTC m=+1339.092235868" watchObservedRunningTime="2026-02-16 13:25:39.771460783 +0000 UTC m=+1339.098174511"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.792543 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.805092 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.814870 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.817357 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
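The pod_startup_latency_tracker entry above reports ceilometer-0's podStartSLOduration next to its end-to-end duration; here the two differ by roughly the image-pull window recorded between firstStartedPulling and lastFinishedPulling, and matching entries for nova-api-0, nova-scheduler-0 and nova-metadata-0 follow below. A minimal sketch, assuming entries shaped like the one above (illustrative only, not part of the captured log):

    import re

    # Hypothetical helper: collect podStartSLOduration values from
    # "Observed pod startup duration" entries.
    SLO = re.compile(r'pod="([^"]+)" podStartSLOduration=([0-9.]+)')

    def startup_durations(lines):
        for line in lines:
            if "Observed pod startup duration" in line:
                m = SLO.search(line)
                if m:
                    yield m.group(1), float(m.group(2))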
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.821086 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.849460 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.921704 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.922485 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-config-data\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:39 crc kubenswrapper[4816]: I0216 13:25:39.923468 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9qvl\" (UniqueName: \"kubernetes.io/projected/7554f0b7-4174-4950-ab00-aa21ecf64b56-kube-api-access-q9qvl\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.024700 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9qvl\" (UniqueName: \"kubernetes.io/projected/7554f0b7-4174-4950-ab00-aa21ecf64b56-kube-api-access-q9qvl\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.024805 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.024863 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-config-data\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.028080 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-config-data\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.029272 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.043150 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9qvl\" (UniqueName: \"kubernetes.io/projected/7554f0b7-4174-4950-ab00-aa21ecf64b56-kube-api-access-q9qvl\") pod \"nova-scheduler-0\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.147118 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.164752 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:42092->10.217.0.191:8775: read: connection reset by peer"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.165135 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:42094->10.217.0.191:8775: read: connection reset by peer"
Feb 16 13:25:40 crc kubenswrapper[4816]: W0216 13:25:40.586517 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7554f0b7_4174_4950_ab00_aa21ecf64b56.slice/crio-9d56845240197e82d29460a91a83ca0d499e76a7a52f250d63481d6050300cd4 WatchSource:0}: Error finding container 9d56845240197e82d29460a91a83ca0d499e76a7a52f250d63481d6050300cd4: Status 404 returned error can't find the container with id 9d56845240197e82d29460a91a83ca0d499e76a7a52f250d63481d6050300cd4
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.601545 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.612924 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.740456 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-nova-metadata-tls-certs\") pod \"f5330f89-cc28-46cd-9119-a2fb00da6220\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") "
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.740807 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5330f89-cc28-46cd-9119-a2fb00da6220-logs\") pod \"f5330f89-cc28-46cd-9119-a2fb00da6220\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") "
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.740865 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48r82\" (UniqueName: \"kubernetes.io/projected/f5330f89-cc28-46cd-9119-a2fb00da6220-kube-api-access-48r82\") pod \"f5330f89-cc28-46cd-9119-a2fb00da6220\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") "
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.740916 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-combined-ca-bundle\") pod \"f5330f89-cc28-46cd-9119-a2fb00da6220\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") "
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.740989 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-config-data\") pod \"f5330f89-cc28-46cd-9119-a2fb00da6220\" (UID: \"f5330f89-cc28-46cd-9119-a2fb00da6220\") "
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.741603 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5330f89-cc28-46cd-9119-a2fb00da6220-logs" (OuterVolumeSpecName: "logs") pod "f5330f89-cc28-46cd-9119-a2fb00da6220" (UID: "f5330f89-cc28-46cd-9119-a2fb00da6220"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.744335 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5330f89-cc28-46cd-9119-a2fb00da6220-kube-api-access-48r82" (OuterVolumeSpecName: "kube-api-access-48r82") pod "f5330f89-cc28-46cd-9119-a2fb00da6220" (UID: "f5330f89-cc28-46cd-9119-a2fb00da6220"). InnerVolumeSpecName "kube-api-access-48r82". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.761025 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27fde082-22f6-49b6-9750-796875a2fe49","Type":"ContainerStarted","Data":"0835c326e6522f8ce700ad606e2e71a3e72b02b2702bf969281105422fd2bf4b"}
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.761079 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27fde082-22f6-49b6-9750-796875a2fe49","Type":"ContainerStarted","Data":"bc1beb983a2e186cb5db4c2a6fcde47de90c5bb66dbd822cf870630ece875a2b"}
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.761090 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27fde082-22f6-49b6-9750-796875a2fe49","Type":"ContainerStarted","Data":"8554fb823081f64a56e1bbc1f9c4c51204feaa39a0fe5f9921b3e6681366287e"}
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.770566 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7554f0b7-4174-4950-ab00-aa21ecf64b56","Type":"ContainerStarted","Data":"9d56845240197e82d29460a91a83ca0d499e76a7a52f250d63481d6050300cd4"}
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.777480 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5330f89-cc28-46cd-9119-a2fb00da6220" (UID: "f5330f89-cc28-46cd-9119-a2fb00da6220"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.785613 4816 generic.go:334] "Generic (PLEG): container finished" podID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerID="4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee" exitCode=0
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.785735 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f5330f89-cc28-46cd-9119-a2fb00da6220","Type":"ContainerDied","Data":"4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee"}
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.785767 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f5330f89-cc28-46cd-9119-a2fb00da6220","Type":"ContainerDied","Data":"d90f4ac56544a200da5ad544fa476c92a517b1b392e574b9f936f8cce0c47180"}
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.785791 4816 scope.go:117] "RemoveContainer" containerID="4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.785934 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.791953 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.791921994 podStartE2EDuration="2.791921994s" podCreationTimestamp="2026-02-16 13:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:40.781238422 +0000 UTC m=+1340.107952150" watchObservedRunningTime="2026-02-16 13:25:40.791921994 +0000 UTC m=+1340.118635722"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.799829 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-config-data" (OuterVolumeSpecName: "config-data") pod "f5330f89-cc28-46cd-9119-a2fb00da6220" (UID: "f5330f89-cc28-46cd-9119-a2fb00da6220"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.811367 4816 scope.go:117] "RemoveContainer" containerID="e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.817913 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f5330f89-cc28-46cd-9119-a2fb00da6220" (UID: "f5330f89-cc28-46cd-9119-a2fb00da6220"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.836779 4816 scope.go:117] "RemoveContainer" containerID="4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee"
Feb 16 13:25:40 crc kubenswrapper[4816]: E0216 13:25:40.837487 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee\": container with ID starting with 4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee not found: ID does not exist" containerID="4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.837538 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee"} err="failed to get container status \"4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee\": rpc error: code = NotFound desc = could not find container \"4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee\": container with ID starting with 4628658b76c90deacaff76ea8309ac47a400a7ff8f50b9e48165ec18070899ee not found: ID does not exist"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.837568 4816 scope.go:117] "RemoveContainer" containerID="e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7"
Feb 16 13:25:40 crc kubenswrapper[4816]: E0216 13:25:40.837918 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7\": container with ID starting with e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7 not found: ID does not exist" containerID="e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.837953 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7"} err="failed to get container status \"e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7\": rpc error: code = NotFound desc = could not find container \"e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7\": container with ID starting with e8e6daf4c00f71dcae32c48d1835c8b4f5f07555acb30ac5289f05c599b278b7 not found: ID does not exist"
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.843121 4816 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.843152 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5330f89-cc28-46cd-9119-a2fb00da6220-logs\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.843162 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48r82\" (UniqueName: \"kubernetes.io/projected/f5330f89-cc28-46cd-9119-a2fb00da6220-kube-api-access-48r82\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.843174 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:40 crc kubenswrapper[4816]: I0216 13:25:40.843184 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5330f89-cc28-46cd-9119-a2fb00da6220-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.122586 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.133721 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.150510 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Feb 16 13:25:41 crc kubenswrapper[4816]: E0216 13:25:41.151187 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-log"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.151210 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-log"
Feb 16 13:25:41 crc kubenswrapper[4816]: E0216 13:25:41.151236 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-metadata"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.151247 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-metadata"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.151488 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-log"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.151514 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" containerName="nova-metadata-metadata"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.152768 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.157097 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.157171 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.167529 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.251705 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgk27\" (UniqueName: \"kubernetes.io/projected/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-kube-api-access-lgk27\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.251785 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.251825 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-logs\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.251885 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.251977 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-config-data\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.353834 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgk27\" (UniqueName: \"kubernetes.io/projected/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-kube-api-access-lgk27\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.354264 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.354935 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-logs\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.354982 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.355014 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-config-data\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.355434 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-logs\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.363525 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.363601 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-config-data\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.368132 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.374324 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgk27\" (UniqueName: \"kubernetes.io/projected/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-kube-api-access-lgk27\") pod \"nova-metadata-0\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.416859 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f" path="/var/lib/kubelet/pods/539fb4f7-c0c0-4cf4-a7c5-fde5cff7997f/volumes"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.417781 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5330f89-cc28-46cd-9119-a2fb00da6220" path="/var/lib/kubelet/pods/f5330f89-cc28-46cd-9119-a2fb00da6220/volumes"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.473966 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.801072 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7554f0b7-4174-4950-ab00-aa21ecf64b56","Type":"ContainerStarted","Data":"bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663"}
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.833118 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.833095711 podStartE2EDuration="2.833095711s" podCreationTimestamp="2026-02-16 13:25:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:41.821549996 +0000 UTC m=+1341.148263744" watchObservedRunningTime="2026-02-16 13:25:41.833095711 +0000 UTC m=+1341.159809459"
Feb 16 13:25:41 crc kubenswrapper[4816]: I0216 13:25:41.936484 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 16 13:25:42 crc kubenswrapper[4816]: I0216 13:25:42.815397 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"892fbdbb-3c72-45c9-8987-4bd9a01ddf98","Type":"ContainerStarted","Data":"bdb9b742ad3735c49ae99cebe69749569e35206e6f4aa144a488ecb8a622f11d"}
Feb 16 13:25:42 crc kubenswrapper[4816]: I0216 13:25:42.815770 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"892fbdbb-3c72-45c9-8987-4bd9a01ddf98","Type":"ContainerStarted","Data":"4960b5e9a538e65382b2f5a58519a9af1c316bd816511a7121ed68a138e98322"}
Feb 16 13:25:42 crc kubenswrapper[4816]: I0216 13:25:42.815789 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"892fbdbb-3c72-45c9-8987-4bd9a01ddf98","Type":"ContainerStarted","Data":"11a9ead41658435a1c1a8e03bef21a11d70411f97d3b7ebe337851cbdf647e5b"}
Feb 16 13:25:42 crc kubenswrapper[4816]: I0216 13:25:42.848238 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.848219286 podStartE2EDuration="1.848219286s" podCreationTimestamp="2026-02-16 13:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:25:42.841564255 +0000 UTC m=+1342.168277983" watchObservedRunningTime="2026-02-16 13:25:42.848219286 +0000 UTC m=+1342.174933014"
Feb 16 13:25:45 crc kubenswrapper[4816]: I0216 13:25:45.147690 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Feb 16 13:25:46 crc kubenswrapper[4816]: I0216 13:25:46.474734 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 16 13:25:46 crc kubenswrapper[4816]: I0216 13:25:46.475135 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 16 13:25:49 crc kubenswrapper[4816]: I0216 13:25:49.225089 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 16 13:25:49 crc kubenswrapper[4816]: I0216 13:25:49.225403 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 16 13:25:50 crc kubenswrapper[4816]: I0216 13:25:50.148523 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Feb 16 13:25:50 crc kubenswrapper[4816]: I0216 13:25:50.189089 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Feb 16 13:25:50 crc kubenswrapper[4816]: I0216 13:25:50.234898 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 16 13:25:50 crc kubenswrapper[4816]: I0216 13:25:50.234962 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 16 13:25:50 crc kubenswrapper[4816]: I0216 13:25:50.930986 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Feb 16 13:25:51 crc kubenswrapper[4816]: I0216 13:25:51.475697 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Feb 16 13:25:51 crc kubenswrapper[4816]: I0216 13:25:51.475762 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Feb 16 13:25:52 crc kubenswrapper[4816]: I0216 13:25:52.489903 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 16 13:25:52 crc kubenswrapper[4816]: I0216 13:25:52.489924 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Feb 16 13:25:59 crc kubenswrapper[4816]: I0216 13:25:59.236453 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 16 13:25:59 crc kubenswrapper[4816]: I0216 13:25:59.237123 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 16 13:25:59 crc kubenswrapper[4816]: I0216 13:25:59.237769 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 16 13:25:59 crc kubenswrapper[4816]: I0216 13:25:59.238205 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 16 13:25:59 crc kubenswrapper[4816]: I0216 13:25:59.245920 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 16 13:25:59 crc kubenswrapper[4816]: I0216 13:25:59.246358 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 16 13:26:01 crc kubenswrapper[4816]: I0216 13:26:01.482145 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Feb 16 13:26:01 crc kubenswrapper[4816]: I0216 13:26:01.482611 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
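The probe entries above walk each recreated pod through the same sequence: startup probe "unhealthy" while the service warms up (the prober lines show Client.Timeout failures against the pods' HTTPS endpoints), then "started", then readiness "ready" (nova-metadata-0's final "ready" transitions follow just below). A minimal sketch, assuming "SyncLoop (probe)" entries shaped like those above (illustrative only, not part of the captured log), that reduces them to the latest status per pod and probe:

    import re

    # Hypothetical helper: keep the most recent status per (pod, probe)
    # from kubelet "SyncLoop (probe)" entries.
    PROBE = re.compile(
        r'"SyncLoop \(probe\)" probe="(\w+)" status="([^"]*)" pod="([^"]+)"')

    def latest_probe_status(lines):
        state = {}
        for line in lines:
            m = PROBE.search(line)
            if m:
                probe, status, pod = m.groups()
                state[(pod, probe)] = status
        return state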
probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 13:26:01 crc kubenswrapper[4816]: I0216 13:26:01.491591 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 13:26:05 crc kubenswrapper[4816]: I0216 13:26:05.108715 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.100349 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-7489-account-create-update-rr6kg"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.102494 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.114158 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.168753 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-7489-account-create-update-9p66g"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.171056 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-operator-scripts\") pod \"cinder-7489-account-create-update-rr6kg\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.171187 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcx8g\" (UniqueName: \"kubernetes.io/projected/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-kube-api-access-jcx8g\") pod \"cinder-7489-account-create-update-rr6kg\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.215810 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-7489-account-create-update-9p66g"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.263213 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-7489-account-create-update-rr6kg"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.276588 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-operator-scripts\") pod \"cinder-7489-account-create-update-rr6kg\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.276726 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcx8g\" (UniqueName: \"kubernetes.io/projected/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-kube-api-access-jcx8g\") pod \"cinder-7489-account-create-update-rr6kg\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.278116 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-operator-scripts\") pod \"cinder-7489-account-create-update-rr6kg\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " 
pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.311748 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-mvvwk"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.313308 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.316821 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.327159 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcx8g\" (UniqueName: \"kubernetes.io/projected/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-kube-api-access-jcx8g\") pod \"cinder-7489-account-create-update-rr6kg\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.345761 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-mvvwk"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.384759 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.385309 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="cca53be3-2b0f-4523-8fc4-d992bf72a13c" containerName="openstackclient" containerID="cri-o://cad7db43bacb78d56110ccb724697599cb15ce0faf3ee895f3eca64fe020fab6" gracePeriod=2 Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.393906 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.418013 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60ac96a6-9ece-4d04-994a-70c576c82534" path="/var/lib/kubelet/pods/60ac96a6-9ece-4d04-994a-70c576c82534/volumes" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.430585 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-lrf8c"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.473518 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-lrf8c"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.484775 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts\") pod \"root-account-create-update-mvvwk\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.484875 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mllmd\" (UniqueName: \"kubernetes.io/projected/161ac06a-fdce-4a22-b21c-d9a297bf4142-kube-api-access-mllmd\") pod \"root-account-create-update-mvvwk\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.490502 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.504221 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-fc34-account-create-update-q25nr"] Feb 16 13:26:25 crc kubenswrapper[4816]: E0216 13:26:25.504768 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cca53be3-2b0f-4523-8fc4-d992bf72a13c" containerName="openstackclient" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.504801 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cca53be3-2b0f-4523-8fc4-d992bf72a13c" containerName="openstackclient" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.505048 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cca53be3-2b0f-4523-8fc4-d992bf72a13c" containerName="openstackclient" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.505897 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.513102 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.524768 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-fc34-account-create-update-q25nr"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.587627 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mllmd\" (UniqueName: \"kubernetes.io/projected/161ac06a-fdce-4a22-b21c-d9a297bf4142-kube-api-access-mllmd\") pod \"root-account-create-update-mvvwk\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.588253 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts\") pod \"root-account-create-update-mvvwk\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.589200 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts\") pod \"root-account-create-update-mvvwk\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.658416 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-1657-account-create-update-jfrmp"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.663347 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.680605 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.756387 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6847-account-create-update-vdkv9"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.805769 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mllmd\" (UniqueName: \"kubernetes.io/projected/161ac06a-fdce-4a22-b21c-d9a297bf4142-kube-api-access-mllmd\") pod \"root-account-create-update-mvvwk\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.851009 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07be83ef-77e3-4327-bcbd-ed16e9a92e40-operator-scripts\") pod \"barbican-fc34-account-create-update-q25nr\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.851344 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw5l8\" (UniqueName: \"kubernetes.io/projected/07be83ef-77e3-4327-bcbd-ed16e9a92e40-kube-api-access-gw5l8\") pod \"barbican-fc34-account-create-update-q25nr\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.873791 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.876294 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.882747 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1657-account-create-update-jfrmp"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.931762 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-fc34-account-create-update-whvjm"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.954290 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e623-account-create-update-9hfsw"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.955867 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw5l8\" (UniqueName: \"kubernetes.io/projected/07be83ef-77e3-4327-bcbd-ed16e9a92e40-kube-api-access-gw5l8\") pod \"barbican-fc34-account-create-update-q25nr\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.955934 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct8p5\" (UniqueName: \"kubernetes.io/projected/e3af0ec9-22bb-4119-b349-bef284903316-kube-api-access-ct8p5\") pod \"glance-1657-account-create-update-jfrmp\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.955988 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07be83ef-77e3-4327-bcbd-ed16e9a92e40-operator-scripts\") pod \"barbican-fc34-account-create-update-q25nr\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.956319 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3af0ec9-22bb-4119-b349-bef284903316-operator-scripts\") pod \"glance-1657-account-create-update-jfrmp\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.956435 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.957100 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07be83ef-77e3-4327-bcbd-ed16e9a92e40-operator-scripts\") pod \"barbican-fc34-account-create-update-q25nr\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.958904 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.976854 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-fc34-account-create-update-whvjm"] Feb 16 13:26:25 crc kubenswrapper[4816]: I0216 13:26:25.986581 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw5l8\" (UniqueName: \"kubernetes.io/projected/07be83ef-77e3-4327-bcbd-ed16e9a92e40-kube-api-access-gw5l8\") pod \"barbican-fc34-account-create-update-q25nr\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.004960 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e623-account-create-update-9hfsw"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.005070 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.025284 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.026118 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="openstack-network-exporter" containerID="cri-o://21fba53057aa6cf88d2e0405e7ed7ba15f4e8c3f5cb13e82b0cbe8e8ec11ac99" gracePeriod=300 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.057485 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6847-account-create-update-vdkv9"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.057848 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct8p5\" (UniqueName: \"kubernetes.io/projected/e3af0ec9-22bb-4119-b349-bef284903316-kube-api-access-ct8p5\") pod \"glance-1657-account-create-update-jfrmp\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.058695 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3af0ec9-22bb-4119-b349-bef284903316-operator-scripts\") pod \"glance-1657-account-create-update-jfrmp\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.059105 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqqvh\" (UniqueName: \"kubernetes.io/projected/deed1b26-cbe9-476b-8cc3-9898c6ad929f-kube-api-access-bqqvh\") pod \"placement-6847-account-create-update-vdkv9\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " 
pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.059190 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deed1b26-cbe9-476b-8cc3-9898c6ad929f-operator-scripts\") pod \"placement-6847-account-create-update-vdkv9\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.060130 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3af0ec9-22bb-4119-b349-bef284903316-operator-scripts\") pod \"glance-1657-account-create-update-jfrmp\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.073234 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-1657-account-create-update-9lhq5"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.095427 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct8p5\" (UniqueName: \"kubernetes.io/projected/e3af0ec9-22bb-4119-b349-bef284903316-kube-api-access-ct8p5\") pod \"glance-1657-account-create-update-jfrmp\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.103553 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-1657-account-create-update-9lhq5"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.131077 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.131510 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="ovn-northd" containerID="cri-o://3ab81f6efce6fb86362c42eda7876dd469e8113fb561b222ea56c2868f292aeb" gracePeriod=30 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.131717 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="openstack-network-exporter" containerID="cri-o://69ee0cac9e4f93da6f2382337f0f124d262804d3845fd42fa8d7a742bee8220b" gracePeriod=30 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.150399 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.194758 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.196134 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2576d56f-8296-4693-911b-20b6814cd8aa-operator-scripts\") pod \"neutron-e623-account-create-update-9hfsw\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.196179 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6n2p\" (UniqueName: \"kubernetes.io/projected/2576d56f-8296-4693-911b-20b6814cd8aa-kube-api-access-j6n2p\") pod \"neutron-e623-account-create-update-9hfsw\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.196248 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqqvh\" (UniqueName: \"kubernetes.io/projected/deed1b26-cbe9-476b-8cc3-9898c6ad929f-kube-api-access-bqqvh\") pod \"placement-6847-account-create-update-vdkv9\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.196276 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deed1b26-cbe9-476b-8cc3-9898c6ad929f-operator-scripts\") pod \"placement-6847-account-create-update-vdkv9\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.196942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deed1b26-cbe9-476b-8cc3-9898c6ad929f-operator-scripts\") pod \"placement-6847-account-create-update-vdkv9\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.200885 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.201564 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerName="openstack-network-exporter" containerID="cri-o://928d717d6d418e23ea08ee966cb3977d296a5c9fa2ddbce5816ef4e5e13e37d9" gracePeriod=300 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.231869 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="ovsdbserver-sb" containerID="cri-o://7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000" gracePeriod=300 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.236912 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.316066 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2576d56f-8296-4693-911b-20b6814cd8aa-operator-scripts\") pod 
\"neutron-e623-account-create-update-9hfsw\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.316431 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6n2p\" (UniqueName: \"kubernetes.io/projected/2576d56f-8296-4693-911b-20b6814cd8aa-kube-api-access-j6n2p\") pod \"neutron-e623-account-create-update-9hfsw\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.317834 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.317898 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data podName:9eb39773-46a3-4f31-a95a-64a183dbe417 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:26.817870812 +0000 UTC m=+1386.144584540 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data") pod "rabbitmq-server-0" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417") : configmap "rabbitmq-config-data" not found Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.318592 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2576d56f-8296-4693-911b-20b6814cd8aa-operator-scripts\") pod \"neutron-e623-account-create-update-9hfsw\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.340123 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqqvh\" (UniqueName: \"kubernetes.io/projected/deed1b26-cbe9-476b-8cc3-9898c6ad929f-kube-api-access-bqqvh\") pod \"placement-6847-account-create-update-vdkv9\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.342765 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1245-account-create-update-k2kgr"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.344224 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.372669 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.404061 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ovn-northd-0" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="ovn-northd" probeResult="failure" output=< Feb 16 13:26:26 crc kubenswrapper[4816]: 2026-02-16T13:26:26Z|00001|unixctl|WARN|failed to connect to /tmp/ovn-northd.1.ctl Feb 16 13:26:26 crc kubenswrapper[4816]: ovn-appctl: cannot connect to "/tmp/ovn-northd.1.ctl" (No such file or directory) Feb 16 13:26:26 crc kubenswrapper[4816]: 2026-02-16T13:26:26Z|00001|unixctl|WARN|failed to connect to /tmp/ovn-northd.1.ctl Feb 16 13:26:26 crc kubenswrapper[4816]: ovn-appctl: cannot connect to "/tmp/ovn-northd.1.ctl" (No such file or directory) Feb 16 13:26:26 crc kubenswrapper[4816]: > Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.405219 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6n2p\" (UniqueName: \"kubernetes.io/projected/2576d56f-8296-4693-911b-20b6814cd8aa-kube-api-access-j6n2p\") pod \"neutron-e623-account-create-update-9hfsw\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.413198 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1245-account-create-update-k2kgr"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.424872 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c97xh\" (UniqueName: \"kubernetes.io/projected/04914705-9b32-4bae-a7d7-e5bcc15337ac-kube-api-access-c97xh\") pod \"nova-api-1245-account-create-update-k2kgr\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.425121 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04914705-9b32-4bae-a7d7-e5bcc15337ac-operator-scripts\") pod \"nova-api-1245-account-create-update-k2kgr\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.426970 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-jsr6m"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.428143 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.441314 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.442779 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="ovn-northd" probeResult="failure" output=< Feb 16 13:26:26 crc kubenswrapper[4816]: 2026-02-16T13:26:26Z|00001|unixctl|WARN|failed to connect to /tmp/ovn-northd.1.ctl Feb 16 13:26:26 crc kubenswrapper[4816]: ovn-appctl: cannot connect to "/tmp/ovn-northd.1.ctl" (No such file or directory) Feb 16 13:26:26 crc kubenswrapper[4816]: 2026-02-16T13:26:26Z|00001|unixctl|WARN|failed to connect to /tmp/ovn-northd.1.ctl Feb 16 13:26:26 crc kubenswrapper[4816]: ovn-appctl: cannot connect to "/tmp/ovn-northd.1.ctl" (No such file or directory) Feb 16 13:26:26 crc kubenswrapper[4816]: > Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.472816 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6847-account-create-update-lb227"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.484356 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerName="ovsdbserver-nb" containerID="cri-o://6e64aee290d85b9d87d4f4f3be6dbde686eecaeb8eb080e46016bd90348ba633" gracePeriod=300 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.489727 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-sqvh5"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.503285 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.511379 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7d49f257-3900-43a9-b4c2-353ceeeeea88/ovsdbserver-sb/0.log" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.511419 4816 generic.go:334] "Generic (PLEG): container finished" podID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerID="21fba53057aa6cf88d2e0405e7ed7ba15f4e8c3f5cb13e82b0cbe8e8ec11ac99" exitCode=2 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.511552 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7d49f257-3900-43a9-b4c2-353ceeeeea88","Type":"ContainerDied","Data":"21fba53057aa6cf88d2e0405e7ed7ba15f4e8c3f5cb13e82b0cbe8e8ec11ac99"} Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.527873 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c97xh\" (UniqueName: \"kubernetes.io/projected/04914705-9b32-4bae-a7d7-e5bcc15337ac-kube-api-access-c97xh\") pod \"nova-api-1245-account-create-update-k2kgr\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.527960 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04914705-9b32-4bae-a7d7-e5bcc15337ac-operator-scripts\") pod \"nova-api-1245-account-create-update-k2kgr\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.528162 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/574f06c2-d10b-4b4c-b047-16f0bec03b42-operator-scripts\") pod \"nova-cell0-d27b-account-create-update-jsr6m\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.528397 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nczk\" (UniqueName: \"kubernetes.io/projected/574f06c2-d10b-4b4c-b047-16f0bec03b42-kube-api-access-2nczk\") pod \"nova-cell0-d27b-account-create-update-jsr6m\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.531312 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04914705-9b32-4bae-a7d7-e5bcc15337ac-operator-scripts\") pod \"nova-api-1245-account-create-update-k2kgr\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.532541 4816 configmap.go:193] Couldn't get configMap openstack/ovncontroller-metrics-config: configmap "ovncontroller-metrics-config" not found Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.532602 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config podName:e9f02af3-d5f6-4c8c-81b6-9889d79b0925 nodeName:}" failed. 
No retries permitted until 2026-02-16 13:26:27.032585117 +0000 UTC m=+1386.359298845 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config") pod "ovn-controller-metrics-qxznq" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925") : configmap "ovncontroller-metrics-config" not found Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.545695 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-6847-account-create-update-lb227"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.553433 4816 generic.go:334] "Generic (PLEG): container finished" podID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerID="69ee0cac9e4f93da6f2382337f0f124d262804d3845fd42fa8d7a742bee8220b" exitCode=2 Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.553474 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b7ff418-3104-4d5e-880c-bc9de7258943","Type":"ContainerDied","Data":"69ee0cac9e4f93da6f2382337f0f124d262804d3845fd42fa8d7a742bee8220b"} Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.556807 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c97xh\" (UniqueName: \"kubernetes.io/projected/04914705-9b32-4bae-a7d7-e5bcc15337ac-kube-api-access-c97xh\") pod \"nova-api-1245-account-create-update-k2kgr\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.568924 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-sqvh5"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.592321 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-jsr6m"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.611388 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e623-account-create-update-vfwn8"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.634339 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/574f06c2-d10b-4b4c-b047-16f0bec03b42-operator-scripts\") pod \"nova-cell0-d27b-account-create-update-jsr6m\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.634474 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nczk\" (UniqueName: \"kubernetes.io/projected/574f06c2-d10b-4b4c-b047-16f0bec03b42-kube-api-access-2nczk\") pod \"nova-cell0-d27b-account-create-update-jsr6m\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.637451 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/574f06c2-d10b-4b4c-b047-16f0bec03b42-operator-scripts\") pod \"nova-cell0-d27b-account-create-update-jsr6m\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.660256 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.676878 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e623-account-create-update-vfwn8"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.689817 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nczk\" (UniqueName: \"kubernetes.io/projected/574f06c2-d10b-4b4c-b047-16f0bec03b42-kube-api-access-2nczk\") pod \"nova-cell0-d27b-account-create-update-jsr6m\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.746869 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-bcx8r"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.755895 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.774275 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.774509 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.781920 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-8mdth"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.836768 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.867510 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.867577 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrcqk\" (UniqueName: \"kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.868418 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.868485 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data podName:9eb39773-46a3-4f31-a95a-64a183dbe417 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:27.868466071 +0000 UTC m=+1387.195179799 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data") pod "rabbitmq-server-0" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417") : configmap "rabbitmq-config-data" not found Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.905213 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-bcx8r"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.934150 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-8mdth"] Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.969177 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:26 crc kubenswrapper[4816]: I0216 13:26:26.969232 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrcqk\" (UniqueName: \"kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.969717 4816 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.969779 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:27.469759537 +0000 UTC m=+1386.796473265 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : configmap "openstack-cell1-scripts" not found Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.975281 4816 projected.go:194] Error preparing data for projected volume kube-api-access-jrcqk for pod openstack/nova-cell1-c4fc-account-create-update-bcx8r: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:26 crc kubenswrapper[4816]: E0216 13:26:26.975361 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:27.47534215 +0000 UTC m=+1386.802055878 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-jrcqk" (UniqueName: "kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.003212 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:27 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: if [ -n "cinder" ]; then Feb 16 13:26:27 crc kubenswrapper[4816]: GRANT_DATABASE="cinder" Feb 16 13:26:27 crc kubenswrapper[4816]: else Feb 16 13:26:27 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:27 crc kubenswrapper[4816]: fi Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:27 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:27 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:27 crc kubenswrapper[4816]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:27 crc kubenswrapper[4816]: # support updates Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.004537 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-7489-account-create-update-rr6kg" podUID="bfa5bec4-12b0-4788-a9d2-4dc39afd56e3" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.065448 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1245-account-create-update-tbtlc"] Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.075720 4816 configmap.go:193] Couldn't get configMap openstack/ovncontroller-metrics-config: configmap "ovncontroller-metrics-config" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.075796 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config podName:e9f02af3-d5f6-4c8c-81b6-9889d79b0925 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:28.075778833 +0000 UTC m=+1387.402492551 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config") pod "ovn-controller-metrics-qxznq" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925") : configmap "ovncontroller-metrics-config" not found Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.085475 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1245-account-create-update-tbtlc"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.099706 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-5ljmf"] Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.110233 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000 is running failed: container process not found" containerID="7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000" cmd=["/usr/bin/pidof","ovsdb-server"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.116253 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-5ljmf"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.133350 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-dncx2"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.139553 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-dncx2"] Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.141855 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000 is running failed: container process not found" containerID="7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000" cmd=["/usr/bin/pidof","ovsdb-server"] Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.145983 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000 is running failed: container process not found" containerID="7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000" cmd=["/usr/bin/pidof","ovsdb-server"] Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.147276 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="ovsdbserver-sb" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.149421 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-6nfpn"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.161995 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-6nfpn"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.199083 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-blhnm"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.251911 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-c4fc-account-create-update-blhnm"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.309288 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-v9w6q"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.457321 4816 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/cinder-scheduler-0" secret="" err="secret \"cinder-cinder-dockercfg-6vjhh\" not found" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.519960 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="049f93f8-ed00-4331-b932-1e4f98ffe8c2" path="/var/lib/kubelet/pods/049f93f8-ed00-4331-b932-1e4f98ffe8c2/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.520942 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0731f364-690d-4bf9-a86f-cbaa984c62c8" path="/var/lib/kubelet/pods/0731f364-690d-4bf9-a86f-cbaa984c62c8/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.521511 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4843e372-830c-4b62-a21b-6e9fe41d7973" path="/var/lib/kubelet/pods/4843e372-830c-4b62-a21b-6e9fe41d7973/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.532115 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="608922e5-2248-48a4-bd64-6a0a508ddf23" path="/var/lib/kubelet/pods/608922e5-2248-48a4-bd64-6a0a508ddf23/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.532725 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f169a08-6015-443c-a9d9-5e3f55e5ef58" path="/var/lib/kubelet/pods/9f169a08-6015-443c-a9d9-5e3f55e5ef58/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.533279 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aeb7d949-8e61-4bfa-8b02-0213a7861b9f" path="/var/lib/kubelet/pods/aeb7d949-8e61-4bfa-8b02-0213a7861b9f/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.537392 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af6d642c-ddbc-4faa-8871-ad5556ff1a64" path="/var/lib/kubelet/pods/af6d642c-ddbc-4faa-8871-ad5556ff1a64/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.538176 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0d3c9a3-acab-4211-8a7f-90f5a6fe0060" path="/var/lib/kubelet/pods/b0d3c9a3-acab-4211-8a7f-90f5a6fe0060/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.538733 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b97c413a-6a78-4ff5-87e8-31639467ae1d" path="/var/lib/kubelet/pods/b97c413a-6a78-4ff5-87e8-31639467ae1d/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.547534 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f23d2bf5-14b2-4681-944f-cdb0aedf86c1" path="/var/lib/kubelet/pods/f23d2bf5-14b2-4681-944f-cdb0aedf86c1/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.548417 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f64f8bcc-d25f-4799-b916-7604027ba614" path="/var/lib/kubelet/pods/f64f8bcc-d25f-4799-b916-7604027ba614/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.549003 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f66acf8f-1ec5-4f7a-aff9-2511af6e9d78" path="/var/lib/kubelet/pods/f66acf8f-1ec5-4f7a-aff9-2511af6e9d78/volumes" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.554822 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.554861 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrcqk\" (UniqueName: \"kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.555034 4816 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.555098 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:28.555079024 +0000 UTC m=+1387.881792752 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : configmap "openstack-cell1-scripts" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.560992 4816 projected.go:194] Error preparing data for projected volume kube-api-access-jrcqk for pod openstack/nova-cell1-c4fc-account-create-update-bcx8r: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.561073 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:28.561049747 +0000 UTC m=+1387.887763515 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-jrcqk" (UniqueName: "kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.589771 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-qxznq"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.615466 4816 generic.go:334] "Generic (PLEG): container finished" podID="cca53be3-2b0f-4523-8fc4-d992bf72a13c" containerID="cad7db43bacb78d56110ccb724697599cb15ce0faf3ee895f3eca64fe020fab6" exitCode=137 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.617206 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7489-account-create-update-rr6kg" event={"ID":"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3","Type":"ContainerStarted","Data":"6ce176b5df0e2b72fe7e1c6fa3706f50b4b7c9967bd8a35b15d2b449628cbf7d"} Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.619517 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:27 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: if [ -n "cinder" ]; then Feb 16 13:26:27 crc kubenswrapper[4816]: GRANT_DATABASE="cinder" Feb 16 13:26:27 crc kubenswrapper[4816]: else Feb 16 13:26:27 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:27 crc kubenswrapper[4816]: fi Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:27 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:27 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:27 crc kubenswrapper[4816]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:27 crc kubenswrapper[4816]: # support updates Feb 16 13:26:27 crc kubenswrapper[4816]: Feb 16 13:26:27 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.621735 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-7489-account-create-update-rr6kg" podUID="bfa5bec4-12b0-4788-a9d2-4dc39afd56e3" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.623926 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279/ovsdbserver-nb/0.log" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.623960 4816 generic.go:334] "Generic (PLEG): container finished" podID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerID="928d717d6d418e23ea08ee966cb3977d296a5c9fa2ddbce5816ef4e5e13e37d9" exitCode=2 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.623973 4816 generic.go:334] "Generic (PLEG): container finished" podID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerID="6e64aee290d85b9d87d4f4f3be6dbde686eecaeb8eb080e46016bd90348ba633" exitCode=143 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.624010 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279","Type":"ContainerDied","Data":"928d717d6d418e23ea08ee966cb3977d296a5c9fa2ddbce5816ef4e5e13e37d9"} Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.624032 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279","Type":"ContainerDied","Data":"6e64aee290d85b9d87d4f4f3be6dbde686eecaeb8eb080e46016bd90348ba633"} Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.629409 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7d49f257-3900-43a9-b4c2-353ceeeeea88/ovsdbserver-sb/0.log" Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.629459 4816 generic.go:334] "Generic (PLEG): container finished" podID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerID="7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000" exitCode=143 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.629634 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-qxznq" podUID="e9f02af3-d5f6-4c8c-81b6-9889d79b0925" containerName="openstack-network-exporter" containerID="cri-o://ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.629752 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7d49f257-3900-43a9-b4c2-353ceeeeea88","Type":"ContainerDied","Data":"7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000"} Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.631280 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-rfd9r"] Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.661182 4816 secret.go:188] Couldn't get secret openstack/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.661236 4816 nestedpendingoperations.go:348] Operation 
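[annotation] The heredoc body of the mariadb-account-create-update command is truncated in this capture ("$MYSQL_CMD < ..." is cut before the SQL), and the container never started anyway: the CreateContainerConfigError above shows its DatabasePassword env source, secret "cinder-db-secret", was already deleted. Based only on the comments that did survive, a sketch of the CREATE-then-ALTER pattern they describe might look like the following; ACCOUNT_USER is a hypothetical placeholder, and this is an assumed reconstruction, not the operator's verbatim script:

    # Sketch, assuming MYSQL_CMD/DatabasePassword as set in the log above.
    GRANT_DATABASE="cinder"
    $MYSQL_CMD <<EOF
    CREATE USER IF NOT EXISTS '${ACCOUNT_USER}'@'%';
    ALTER USER '${ACCOUNT_USER}'@'%' IDENTIFIED BY '${DatabasePassword}';
    GRANT ALL PRIVILEGES ON \`${GRANT_DATABASE}\`.* TO '${ACCOUNT_USER}'@'%';
    EOF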
for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:28.161221823 +0000 UTC m=+1387.487935541 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scheduler-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.661431 4816 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.661454 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:28.161447139 +0000 UTC m=+1387.488160867 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scripts" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.662640 4816 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.662722 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:28.162705163 +0000 UTC m=+1387.489418891 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.676041 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-4sts7"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.714129 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-4sts7"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.729792 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-mnq5r"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.755585 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-mnq5r"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.776984 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.777447 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-server" containerID="cri-o://f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.777926 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="swift-recon-cron" containerID="cri-o://b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.777961 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-updater" containerID="cri-o://bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778041 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-auditor" containerID="cri-o://f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778072 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="rsync" containerID="cri-o://2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778078 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-replicator" containerID="cri-o://227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778109 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-server" containerID="cri-o://588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778111 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-expirer" containerID="cri-o://decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778122 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-updater" containerID="cri-o://64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778163 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-reaper" containerID="cri-o://8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778166 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-auditor" containerID="cri-o://688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778192 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-auditor" containerID="cri-o://028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778197 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-replicator" containerID="cri-o://5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778222 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-replicator" containerID="cri-o://a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.778231 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-server" containerID="cri-o://403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.786198 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.786405 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="cinder-scheduler" containerID="cri-o://18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807" gracePeriod=30
Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.787524 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="probe" containerID="cri-o://934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e" gracePeriod=30
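[annotation] Each "Killing container with a grace period" record means a SIGTERM followed, once the gracePeriod (30s here, 10s for dnsmasq below) elapses, by SIGKILL. That maps onto the exit codes seen in this log: exitCode=143 is 128+SIGTERM(15), a clean response to the stop signal, while exitCode=137 is 128+SIGKILL(9), a container that had to be force-killed. The arithmetic is easy to confirm in any bash shell:

    # 137 = 128 + 9 (SIGKILL); runnable anywhere with bash
    sleep 100 & pid=$!
    kill -KILL "$pid"
    wait "$pid"; echo $?   # prints 137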
containerID="cri-o://934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.805749 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-7489-account-create-update-rr6kg"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.819575 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.819813 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api-log" containerID="cri-o://e1555ec2ef7c331226bf5f3c9dd304c9a719ac9687ba3e0531c29bd7c838c76b" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.821688 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api" containerID="cri-o://b4a948c900f30d0434262ce037301027d88ee90b747aed9a2dc7d36fc8c7b454" gracePeriod=30 Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.868951 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.869020 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data podName:9eb39773-46a3-4f31-a95a-64a183dbe417 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:29.869005368 +0000 UTC m=+1389.195719096 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data") pod "rabbitmq-server-0" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417") : configmap "rabbitmq-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.873739 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.884615 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-7489-account-create-update-rr6kg"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.897119 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-smv6j"] Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.897743 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" podUID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerName="dnsmasq-dns" containerID="cri-o://dbd428f503f966eeb3226bb68ba1f69e1b42e9b75e71221255c203cb87e57a4a" gracePeriod=10 Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.977623 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: E0216 13:26:27.977708 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data podName:ecfcee51-c740-477a-87d9-558fffc58686 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:28.477690117 +0000 UTC m=+1387.804403845 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data") pod "rabbitmq-cell1-server-0" (UID: "ecfcee51-c740-477a-87d9-558fffc58686") : configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:27 crc kubenswrapper[4816]: I0216 13:26:27.983155 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-g75rg"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.008968 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-g75rg"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.026925 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.027186 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-log" containerID="cri-o://b6ca05adab300fd9ccf2880dc760585e09aaf0866632bf3a934d424a6c6e0afc" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.027371 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-httpd" containerID="cri-o://b594b400e21605362a39b0644bd2c43537ea857aedc6e60fe673ee3964203cf8" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.048867 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-bqsq5"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.058439 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-bqsq5"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.061870 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7d49f257-3900-43a9-b4c2-353ceeeeea88/ovsdbserver-sb/0.log" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.061932 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.084248 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279/ovsdbserver-nb/0.log" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.084340 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.092121 4816 configmap.go:193] Couldn't get configMap openstack/ovncontroller-metrics-config: configmap "ovncontroller-metrics-config" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.092275 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config podName:e9f02af3-d5f6-4c8c-81b6-9889d79b0925 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:30.092193804 +0000 UTC m=+1389.418907532 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config") pod "ovn-controller-metrics-qxznq" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925") : configmap "ovncontroller-metrics-config" not found Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.107007 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-wmrm4"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.137716 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.139883 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-wmrm4"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.192694 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-config\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.192733 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdbserver-sb-tls-certs\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.192781 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdb-rundir\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.192836 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtltz\" (UniqueName: \"kubernetes.io/projected/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-kube-api-access-rtltz\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.192894 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-scripts\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.192934 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-scripts\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.192961 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-config\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193008 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5x2kt\" (UniqueName: \"kubernetes.io/projected/7d49f257-3900-43a9-b4c2-353ceeeeea88-kube-api-access-5x2kt\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: 
\"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193046 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-metrics-certs-tls-certs\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193103 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193131 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-metrics-certs-tls-certs\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193167 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdb-rundir\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193212 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193237 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-combined-ca-bundle\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193292 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-combined-ca-bundle\") pod \"7d49f257-3900-43a9-b4c2-353ceeeeea88\" (UID: \"7d49f257-3900-43a9-b4c2-353ceeeeea88\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193325 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdbserver-nb-tls-certs\") pod \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\" (UID: \"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.193894 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-config" (OuterVolumeSpecName: "config") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.193932 4816 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.193980 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:29.193965964 +0000 UTC m=+1388.520679692 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scripts" not found Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.194018 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-scripts" (OuterVolumeSpecName: "scripts") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.195030 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-scripts" (OuterVolumeSpecName: "scripts") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.199918 4816 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.200030 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:29.200007148 +0000 UTC m=+1388.526720896 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-config-data" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.201273 4816 secret.go:188] Couldn't get secret openstack/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.201313 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:29.201300434 +0000 UTC m=+1388.528014162 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scheduler-config-data" not found Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.214100 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.225759 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.234290 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.243023 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-l7mtz"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.246996 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.248214 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-l7mtz"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.251131 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-config" (OuterVolumeSpecName: "config") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.253028 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d49f257-3900-43a9-b4c2-353ceeeeea88-kube-api-access-5x2kt" (OuterVolumeSpecName: "kube-api-access-5x2kt") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "kube-api-access-5x2kt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.253241 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-kube-api-access-rtltz" (OuterVolumeSpecName: "kube-api-access-rtltz") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "kube-api-access-rtltz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.265158 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8ff9ccb6f-bwqh8"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.265396 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-8ff9ccb6f-bwqh8" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-api" containerID="cri-o://a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.265962 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-8ff9ccb6f-bwqh8" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-httpd" containerID="cri-o://4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.271626 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "barbican" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="barbican" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.272842 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-fc34-account-create-update-q25nr" podUID="07be83ef-77e3-4327-bcbd-ed16e9a92e40" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.276437 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-fc34-account-create-update-q25nr"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.293618 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5ffd8b88f4-cqjcr"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.293868 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-5ffd8b88f4-cqjcr" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-log" containerID="cri-o://0a37364f47721e42e8d7d8c8e7e0b76b9f09f0c7e0a00afcf23bbc67bb3d615e" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.294297 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-5ffd8b88f4-cqjcr" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-api" containerID="cri-o://ed64af8ac2faddc8f5b3609993e7e85b7c02038ee89682aa306fb9d136d0c815" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.300083 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config\") pod \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.300358 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-combined-ca-bundle\") pod \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.300542 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config-secret\") pod \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.300557 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcvrv\" (UniqueName: \"kubernetes.io/projected/cca53be3-2b0f-4523-8fc4-d992bf72a13c-kube-api-access-tcvrv\") pod \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\" (UID: \"cca53be3-2b0f-4523-8fc4-d992bf72a13c\") " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.307551 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.308206 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" 
containerName="glance-log" containerID="cri-o://be17341e9a947a7465a0a48dedaaad75abdb811c1f90696013b655b457845fe7" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.308746 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerName="glance-httpd" containerID="cri-o://527ce40525cb7c9b030d6afba0202147d041220b639540ecbe06dabb3e1425e2" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.309644 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310267 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310292 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310307 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310321 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310334 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtltz\" (UniqueName: \"kubernetes.io/projected/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-kube-api-access-rtltz\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310347 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310358 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d49f257-3900-43a9-b4c2-353ceeeeea88-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310370 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.310381 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5x2kt\" (UniqueName: \"kubernetes.io/projected/7d49f257-3900-43a9-b4c2-353ceeeeea88-kube-api-access-5x2kt\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.337636 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cca53be3-2b0f-4523-8fc4-d992bf72a13c-kube-api-access-tcvrv" (OuterVolumeSpecName: "kube-api-access-tcvrv") pod "cca53be3-2b0f-4523-8fc4-d992bf72a13c" (UID: "cca53be3-2b0f-4523-8fc4-d992bf72a13c"). InnerVolumeSpecName "kube-api-access-tcvrv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.344874 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.350228 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-jwglz"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.353523 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qxznq_e9f02af3-d5f6-4c8c-81b6-9889d79b0925/openstack-network-exporter/0.log" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.353624 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.355294 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "glance" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="glance" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.357129 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack/glance-1657-account-create-update-jfrmp" podUID="e3af0ec9-22bb-4119-b349-bef284903316" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.362947 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd" containerID="cri-o://7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.379880 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-jwglz"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.409394 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e623-account-create-update-9hfsw"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.412512 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcvrv\" (UniqueName: \"kubernetes.io/projected/cca53be3-2b0f-4523-8fc4-d992bf72a13c-kube-api-access-tcvrv\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.412542 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.489290 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-cbf6d8974-7ddwq"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.490334 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener-log" containerID="cri-o://deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.490425 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener" containerID="cri-o://2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.495847 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6847-account-create-update-vdkv9"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.512503 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-1657-account-create-update-jfrmp"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.513522 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config\") pod \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") " Feb 16 13:26:28 crc 
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.513588 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ls9k7\" (UniqueName: \"kubernetes.io/projected/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-kube-api-access-ls9k7\") pod \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") "
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.513812 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovn-rundir\") pod \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") "
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.513901 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovs-rundir\") pod \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") "
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.513928 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-combined-ca-bundle\") pod \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") "
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.513976 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-metrics-certs-tls-certs\") pod \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\" (UID: \"e9f02af3-d5f6-4c8c-81b6-9889d79b0925\") "
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.514738 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config" (OuterVolumeSpecName: "config") pod "e9f02af3-d5f6-4c8c-81b6-9889d79b0925" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.514779 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "e9f02af3-d5f6-4c8c-81b6-9889d79b0925" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.520307 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "e9f02af3-d5f6-4c8c-81b6-9889d79b0925" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.542255 4816 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovs-rundir\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.542297 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.542309 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-ovn-rundir\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.548223 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.548303 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data podName:ecfcee51-c740-477a-87d9-558fffc58686 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:29.548280551 +0000 UTC m=+1388.874994279 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data") pod "rabbitmq-cell1-server-0" (UID: "ecfcee51-c740-477a-87d9-558fffc58686") : configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.550609 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-lfw76"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.580457 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-kube-api-access-ls9k7" (OuterVolumeSpecName: "kube-api-access-ls9k7") pod "e9f02af3-d5f6-4c8c-81b6-9889d79b0925" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925"). InnerVolumeSpecName "kube-api-access-ls9k7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.585466 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-757c679767-6dfp5"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.585763 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-757c679767-6dfp5" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker-log" containerID="cri-o://b0b6d2f42d6bfd99410fd4fbcca38774a4fbd1a10e9d1373d8a5aa64dbbd9e0a" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.586107 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-757c679767-6dfp5" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker" containerID="cri-o://83a666709d0398bcf18db5bff64d1c6fa8da80e779c24200130a17a483a8ae2b" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.597106 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.611782 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5b75bc5544-lb94h"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.612059 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5b75bc5544-lb94h" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api-log" containerID="cri-o://c3ee070672541f2475c7a5b84908e9db321ca75bcdb3b2238999b588eeb68da3" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.612278 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5b75bc5544-lb94h" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api" containerID="cri-o://305b8eb6bcfac360528db193c73952f20605bc0004e0f5602cffb736efb9d9ec" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.618190 4816 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Feb 16 13:26:28 crc kubenswrapper[4816]: + source /usr/local/bin/container-scripts/functions Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNBridge=br-int Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNRemote=tcp:localhost:6642 Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNEncapType=geneve Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNAvailabilityZones= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ EnableChassisAsGateway=true Feb 16 13:26:28 crc kubenswrapper[4816]: ++ PhysicalNetworks= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNHostName= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ DB_FILE=/etc/openvswitch/conf.db Feb 16 13:26:28 crc kubenswrapper[4816]: ++ ovs_dir=/var/lib/openvswitch Feb 16 13:26:28 crc kubenswrapper[4816]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Feb 16 13:26:28 crc kubenswrapper[4816]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Feb 16 13:26:28 crc kubenswrapper[4816]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 16 13:26:28 crc kubenswrapper[4816]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 16 13:26:28 crc kubenswrapper[4816]: + sleep 0.5 Feb 16 13:26:28 crc kubenswrapper[4816]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 16 13:26:28 crc kubenswrapper[4816]: + cleanup_ovsdb_server_semaphore Feb 16 13:26:28 crc kubenswrapper[4816]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 16 13:26:28 crc kubenswrapper[4816]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Feb 16 13:26:28 crc kubenswrapper[4816]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-rfd9r" message=< Feb 16 13:26:28 crc kubenswrapper[4816]: Exiting ovsdb-server (5) [ OK ] Feb 16 13:26:28 crc kubenswrapper[4816]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Feb 16 13:26:28 crc kubenswrapper[4816]: + source /usr/local/bin/container-scripts/functions Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNBridge=br-int Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNRemote=tcp:localhost:6642 Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNEncapType=geneve Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNAvailabilityZones= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ EnableChassisAsGateway=true Feb 16 13:26:28 crc kubenswrapper[4816]: ++ PhysicalNetworks= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNHostName= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ DB_FILE=/etc/openvswitch/conf.db Feb 16 13:26:28 crc kubenswrapper[4816]: ++ ovs_dir=/var/lib/openvswitch Feb 16 13:26:28 crc kubenswrapper[4816]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Feb 16 13:26:28 crc kubenswrapper[4816]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Feb 16 13:26:28 crc kubenswrapper[4816]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 16 13:26:28 crc kubenswrapper[4816]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 16 13:26:28 crc kubenswrapper[4816]: + sleep 0.5 Feb 16 13:26:28 crc kubenswrapper[4816]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 16 13:26:28 crc kubenswrapper[4816]: + cleanup_ovsdb_server_semaphore Feb 16 13:26:28 crc kubenswrapper[4816]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 16 13:26:28 crc kubenswrapper[4816]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Feb 16 13:26:28 crc kubenswrapper[4816]: > Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.618230 4816 kuberuntime_container.go:691] "PreStop hook failed" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Feb 16 13:26:28 crc kubenswrapper[4816]: + source /usr/local/bin/container-scripts/functions Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNBridge=br-int Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNRemote=tcp:localhost:6642 Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNEncapType=geneve Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNAvailabilityZones= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ EnableChassisAsGateway=true Feb 16 13:26:28 crc kubenswrapper[4816]: ++ PhysicalNetworks= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ OVNHostName= Feb 16 13:26:28 crc kubenswrapper[4816]: ++ DB_FILE=/etc/openvswitch/conf.db Feb 16 13:26:28 crc kubenswrapper[4816]: ++ ovs_dir=/var/lib/openvswitch Feb 16 13:26:28 crc kubenswrapper[4816]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Feb 16 13:26:28 crc kubenswrapper[4816]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Feb 16 13:26:28 crc kubenswrapper[4816]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 16 13:26:28 crc kubenswrapper[4816]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 16 13:26:28 crc kubenswrapper[4816]: + sleep 0.5 Feb 16 13:26:28 crc kubenswrapper[4816]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Feb 16 13:26:28 crc kubenswrapper[4816]: + cleanup_ovsdb_server_semaphore Feb 16 13:26:28 crc kubenswrapper[4816]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Feb 16 13:26:28 crc kubenswrapper[4816]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Feb 16 13:26:28 crc kubenswrapper[4816]: > pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server" containerID="cri-o://8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.618274 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server" containerID="cri-o://8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.634831 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-lfw76"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.647008 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.647067 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrcqk\" (UniqueName: \"kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.647188 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ls9k7\" (UniqueName: \"kubernetes.io/projected/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-kube-api-access-ls9k7\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.647203 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.647568 4816 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.647619 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:30.647601524 +0000 UTC m=+1389.974315252 (durationBeforeRetry 2s). 
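
The two long traces above are the PreStop hook of the ovsdb-server container in ovn-controller-ovs-rfd9r: stop-ovsdb-server.sh polls for a semaphore file that a peer is expected to drop once it is safe to stop, then removes it and stops only the ovsdb-server process, leaving ovs-vswitchd alone. The first copy exited 137 (128 + SIGKILL), i.e. the runtime killed the hook before it finished. A reconstruction of the loop implied by the trace; the bounded timeout is an addition so the sketch cannot hang, where the traced script appears to loop unbounded:

  #!/bin/bash
  # Semaphore-gated shutdown, reconstructed from the xtrace output above.
  SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server

  cleanup_ovsdb_server_semaphore() {
    rm -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE"
  }

  # Wait for the peer to signal readiness (timeout added for the sketch only).
  for _ in $(seq 1 120); do
    [ -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE" ] && break
    sleep 0.5
  done

  cleanup_ovsdb_server_semaphore
  /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
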
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : configmap "openstack-cell1-scripts" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.651348 4816 projected.go:194] Error preparing data for projected volume kube-api-access-jrcqk for pod openstack/nova-cell1-c4fc-account-create-update-bcx8r: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.651405 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:30.651387527 +0000 UTC m=+1389.978101255 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-jrcqk" (UniqueName: "kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.657781 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-njv4t"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.679456 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.680580 4816 generic.go:334] "Generic (PLEG): container finished" podID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerID="deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505" exitCode=143 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.680655 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" event={"ID":"ff0d5c9b-ff09-43bf-977f-e69533c63966","Type":"ContainerDied","Data":"deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.683830 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-fc34-account-create-update-q25nr" event={"ID":"07be83ef-77e3-4327-bcbd-ed16e9a92e40","Type":"ContainerStarted","Data":"f9a4ddb48db5608b5b0664eaa0475f1c311057a3d11b7a67c140a569e435be78"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.686592 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.688931 4816 generic.go:334] "Generic (PLEG): container finished" podID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerID="e1555ec2ef7c331226bf5f3c9dd304c9a719ac9687ba3e0531c29bd7c838c76b" exitCode=143 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.689003 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"975a9d1d-44d0-4b11-8a41-8f237da1ad85","Type":"ContainerDied","Data":"e1555ec2ef7c331226bf5f3c9dd304c9a719ac9687ba3e0531c29bd7c838c76b"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.769014 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-njv4t"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.771138 4816 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7d49f257-3900-43a9-b4c2-353ceeeeea88/ovsdbserver-sb/0.log" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.771233 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7d49f257-3900-43a9-b4c2-353ceeeeea88","Type":"ContainerDied","Data":"2746e3a85e9e93efbb0bef5286b51102cb12a12dcef40dcc18a11315f4192f5e"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.771280 4816 scope.go:117] "RemoveContainer" containerID="21fba53057aa6cf88d2e0405e7ed7ba15f4e8c3f5cb13e82b0cbe8e8ec11ac99" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.771502 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.773079 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.784152 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.784199 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.789069 4816 generic.go:334] "Generic (PLEG): container finished" podID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerID="dbd428f503f966eeb3226bb68ba1f69e1b42e9b75e71221255c203cb87e57a4a" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.789170 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" event={"ID":"a2ea4453-2b42-409d-bac8-b317e43dcf6a","Type":"ContainerDied","Data":"dbd428f503f966eeb3226bb68ba1f69e1b42e9b75e71221255c203cb87e57a4a"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.804179 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.804288 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rfd9r" event={"ID":"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b","Type":"ContainerDied","Data":"8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.810329 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1245-account-create-update-k2kgr"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.815206 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mvvwk" event={"ID":"161ac06a-fdce-4a22-b21c-d9a297bf4142","Type":"ContainerStarted","Data":"6102f7b1a321f650a3c7a67437394cba867978c4cd1d364e1858648afad1d022"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.815250 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/root-account-create-update-mvvwk" event={"ID":"161ac06a-fdce-4a22-b21c-d9a297bf4142","Type":"ContainerStarted","Data":"2b8aa14c2d3209be6ee82c27deb5661aada48946547867f21781fad6b358a3eb"} Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.846824 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "barbican" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="barbican" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.847317 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "nova_api" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="nova_api" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.847911 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "neutron" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="neutron" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.848262 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-fc34-account-create-update-q25nr" podUID="07be83ef-77e3-4327-bcbd-ed16e9a92e40" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.848254 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-bcx8r"] Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.848684 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-1245-account-create-update-k2kgr" podUID="04914705-9b32-4bae-a7d7-e5bcc15337ac" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.848722 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-jrcqk operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" podUID="ddbe806c-23ac-4f2f-87e1-be1ec2189c87" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.849012 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" 
pod="openstack/neutron-e623-account-create-update-9hfsw" podUID="2576d56f-8296-4693-911b-20b6814cd8aa" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.851880 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qxznq_e9f02af3-d5f6-4c8c-81b6-9889d79b0925/openstack-network-exporter/0.log" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.851917 4816 generic.go:334] "Generic (PLEG): container finished" podID="e9f02af3-d5f6-4c8c-81b6-9889d79b0925" containerID="ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786" exitCode=2 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.852319 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qxznq" event={"ID":"e9f02af3-d5f6-4c8c-81b6-9889d79b0925","Type":"ContainerDied","Data":"ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.852384 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qxznq" event={"ID":"e9f02af3-d5f6-4c8c-81b6-9889d79b0925","Type":"ContainerDied","Data":"77f2cb4125f7b591ccbdd43677e57982b05d6d57b3f6c04b744457270b044c66"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.852513 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qxznq" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.857005 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-z8cp5"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.868920 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.869178 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-log" containerID="cri-o://4960b5e9a538e65382b2f5a58519a9af1c316bd816511a7121ed68a138e98322" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.869300 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-metadata" containerID="cri-o://bdb9b742ad3735c49ae99cebe69749569e35206e6f4aa144a488ecb8a622f11d" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.893765 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.894063 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-log" containerID="cri-o://bc1beb983a2e186cb5db4c2a6fcde47de90c5bb66dbd822cf870630ece875a2b" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.894232 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-api" containerID="cri-o://0835c326e6522f8ce700ad606e2e71a3e72b02b2702bf969281105422fd2bf4b" gracePeriod=30 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.899509 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-twj49"] Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.904958 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 
13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "placement" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="placement" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.909898 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack/placement-6847-account-create-update-vdkv9" podUID="deed1b26-cbe9-476b-8cc3-9898c6ad929f" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.913700 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-jsr6m"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920458 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920633 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920714 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920772 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920823 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920891 4816 generic.go:334] "Generic (PLEG): container 
finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920943 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.920997 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921046 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921093 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921146 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921197 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921245 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921302 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921388 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921464 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921540 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921600 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921659 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.921750 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.924933 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.925222 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.925305 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.925378 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.925459 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.925531 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.925603 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.925747 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.923233 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "cca53be3-2b0f-4523-8fc4-d992bf72a13c" (UID: "cca53be3-2b0f-4523-8fc4-d992bf72a13c"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.932305 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-z8cp5"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.944219 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-z4v4v"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.948798 4816 generic.go:334] "Generic (PLEG): container finished" podID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerID="be17341e9a947a7465a0a48dedaaad75abdb811c1f90696013b655b457845fe7" exitCode=143 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.948882 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6c7838cc-1729-405a-af1e-ad0f1b9884f7","Type":"ContainerDied","Data":"be17341e9a947a7465a0a48dedaaad75abdb811c1f90696013b655b457845fe7"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.951471 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-twj49"] Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.962125 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279/ovsdbserver-nb/0.log" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.962237 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279","Type":"ContainerDied","Data":"c4d2d1e05b42fbf3dd6dd32ef929666c7cd1d05b0015769de940328c5daa4e25"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.962381 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.966277 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1657-account-create-update-jfrmp" event={"ID":"e3af0ec9-22bb-4119-b349-bef284903316","Type":"ContainerStarted","Data":"20f4ce9e840959c8bd5187864004c9d2be92ac9805956704da3971fca1cd37ff"} Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.967062 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-z4v4v"] Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.968132 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "glance" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="glance" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. 
MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.969853 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:28 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: if [ -n "nova_cell0" ]; then Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="nova_cell0" Feb 16 13:26:28 crc kubenswrapper[4816]: else Feb 16 13:26:28 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:28 crc kubenswrapper[4816]: fi Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:28 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:28 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:28 crc kubenswrapper[4816]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:28 crc kubenswrapper[4816]: # support updates Feb 16 13:26:28 crc kubenswrapper[4816]: Feb 16 13:26:28 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.969899 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack/glance-1657-account-create-update-jfrmp" podUID="e3af0ec9-22bb-4119-b349-bef284903316" Feb 16 13:26:28 crc kubenswrapper[4816]: E0216 13:26:28.971545 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" podUID="574f06c2-d10b-4b4c-b047-16f0bec03b42" Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.984385 4816 generic.go:334] "Generic (PLEG): container finished" podID="4ae7c256-cd2e-4919-a488-84526307d47c" containerID="4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea" exitCode=0 Feb 16 13:26:28 crc kubenswrapper[4816]: I0216 13:26:28.984644 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8ff9ccb6f-bwqh8" event={"ID":"4ae7c256-cd2e-4919-a488-84526307d47c","Type":"ContainerDied","Data":"4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea"} Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.014786 4816 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.018004 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9f02af3-d5f6-4c8c-81b6-9889d79b0925" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925"). InnerVolumeSpecName "combined-ca-bundle". 
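
Every *-account-create-update job in this log runs the same generated script with a different database baked in (barbican, nova_api, neutron, placement, glance, nova_cell0, cinder). Two details are worth noting: the logged MYSQL_CMD reads "mysql -h -u root -P 3306" because MYSQL_REMOTE_HOST was empty when the command string was rendered, and the heredoc body is cut off by the log collector (everything between "$MYSQL_CMD <" and "logger=" is missing). The script's own comments still describe the pattern: CREATE USER first, then do all password and TLS work with ALTER so re-runs behave as updates. One plausible shape of the elided SQL, written from those comments and not from the missing text, with a hypothetical user name:

  #!/bin/bash
  # Sketch only: the real heredoc is truncated in the log. The SQL follows
  # the pattern the script's comments describe (CREATE, then ALTER so the
  # job is idempotent); DatabaseUser is a hypothetical stand-in.
  DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
  GRANT_DATABASE="barbican"
  DatabaseUser="barbican"

  mysql -h "${MYSQL_REMOTE_HOST}" -u root -P 3306 <<EOF
  CREATE USER IF NOT EXISTS '${DatabaseUser}'@'%';
  ALTER USER '${DatabaseUser}'@'%' IDENTIFIED BY '${DatabasePassword}';
  GRANT ALL PRIVILEGES ON \`${GRANT_DATABASE}\`.* TO '${DatabaseUser}'@'%';
  FLUSH PRIVILEGES;
  EOF
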
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.031943 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.034060 4816 generic.go:334] "Generic (PLEG): container finished" podID="1940a629-51c3-4dca-a26d-02080dabbd68" containerID="b6ca05adab300fd9ccf2880dc760585e09aaf0866632bf3a934d424a6c6e0afc" exitCode=143 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.034292 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1940a629-51c3-4dca-a26d-02080dabbd68","Type":"ContainerDied","Data":"b6ca05adab300fd9ccf2880dc760585e09aaf0866632bf3a934d424a6c6e0afc"} Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.054056 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.054310 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="6ba4481b-c2a7-4156-b054-8179b24cdb66" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3" gracePeriod=30 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.055453 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.065922 4816 generic.go:334] "Generic (PLEG): container finished" podID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerID="0a37364f47721e42e8d7d8c8e7e0b76b9f09f0c7e0a00afcf23bbc67bb3d615e" exitCode=143 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.066016 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5ffd8b88f4-cqjcr" event={"ID":"26c49ecf-0c54-4aa7-893f-861370b1cdbd","Type":"ContainerDied","Data":"0a37364f47721e42e8d7d8c8e7e0b76b9f09f0c7e0a00afcf23bbc67bb3d615e"} Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.068987 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cca53be3-2b0f-4523-8fc4-d992bf72a13c" (UID: "cca53be3-2b0f-4523-8fc4-d992bf72a13c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.072685 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-mvvwk"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.117983 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.118011 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.127548 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.127621 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.128319 4816 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 16 13:26:29 crc kubenswrapper[4816]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[/bin/sh -c #!/bin/bash Feb 16 13:26:29 crc kubenswrapper[4816]: Feb 16 13:26:29 crc kubenswrapper[4816]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Feb 16 13:26:29 crc kubenswrapper[4816]: Feb 16 13:26:29 crc kubenswrapper[4816]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Feb 16 13:26:29 crc kubenswrapper[4816]: Feb 16 13:26:29 crc kubenswrapper[4816]: MYSQL_CMD="mysql -h -u root -P 3306" Feb 16 13:26:29 crc kubenswrapper[4816]: Feb 16 13:26:29 crc kubenswrapper[4816]: if [ -n "cinder" ]; then Feb 16 13:26:29 crc kubenswrapper[4816]: GRANT_DATABASE="cinder" Feb 16 13:26:29 crc kubenswrapper[4816]: else Feb 16 13:26:29 crc kubenswrapper[4816]: GRANT_DATABASE="*" Feb 16 13:26:29 crc kubenswrapper[4816]: fi Feb 16 13:26:29 crc kubenswrapper[4816]: Feb 16 13:26:29 crc kubenswrapper[4816]: # going for maximum compatibility here: Feb 16 13:26:29 crc kubenswrapper[4816]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Feb 16 13:26:29 crc kubenswrapper[4816]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Feb 16 13:26:29 crc kubenswrapper[4816]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Feb 16 13:26:29 crc kubenswrapper[4816]: # support updates Feb 16 13:26:29 crc kubenswrapper[4816]: Feb 16 13:26:29 crc kubenswrapper[4816]: $MYSQL_CMD < logger="UnhandledError" Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.129476 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack/cinder-7489-account-create-update-rr6kg" podUID="bfa5bec4-12b0-4788-a9d2-4dc39afd56e3" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.148100 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerName="rabbitmq" containerID="cri-o://663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb" gracePeriod=604800 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.156738 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "cca53be3-2b0f-4523-8fc4-d992bf72a13c" (UID: "cca53be3-2b0f-4523-8fc4-d992bf72a13c"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.161848 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "7d49f257-3900-43a9-b4c2-353ceeeeea88" (UID: "7d49f257-3900-43a9-b4c2-353ceeeeea88"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.162075 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-fc34-account-create-update-q25nr"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.176880 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" (UID: "0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
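
Note the spread of grace periods in the "Killing container with a grace period" records: the API and worker containers get the 30s default, while rabbitmq-server-0 here (and rabbitmq-cell1-server-0 just below) is killed with gracePeriod=604800, i.e. seven days, the long drain window the RabbitMQ operator configures on its pods. The value each pod actually carries can be read from its spec:

  #!/bin/bash
  # Compare termination grace periods across pods named in this log.
  for pod in rabbitmq-server-0 barbican-worker-757c679767-6dfp5 nova-scheduler-0; do
    oc -n openstack get pod "$pod" \
      -o jsonpath='{.metadata.name}{"\t"}{.spec.terminationGracePeriodSeconds}{"\n"}'
  done
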
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.178348 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.222083 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.222116 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d49f257-3900-43a9-b4c2-353ceeeeea88-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.222129 4816 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cca53be3-2b0f-4523-8fc4-d992bf72a13c-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.222141 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.222153 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.222254 4816 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.222306 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:31.22228988 +0000 UTC m=+1390.549003608 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scripts" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.222367 4816 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.222443 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:31.222426853 +0000 UTC m=+1390.549140581 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.222484 4816 secret.go:188] Couldn't get secret openstack/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.222507 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:31.222499995 +0000 UTC m=+1390.549213713 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scheduler-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.225007 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-1657-account-create-update-jfrmp"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.243808 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.244051 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7554f0b7-4174-4950-ab00-aa21ecf64b56" containerName="nova-scheduler-scheduler" containerID="cri-o://bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" gracePeriod=30 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.247072 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerName="galera" containerID="cri-o://e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8" gracePeriod=30 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.264191 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="ecfcee51-c740-477a-87d9-558fffc58686" containerName="rabbitmq" containerID="cri-o://2fdeaad8597fbc86132995a2af976c1b8f4746137b8a80f5965aad1ee988bc45" gracePeriod=604800 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.272888 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e623-account-create-update-9hfsw"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.281046 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "e9f02af3-d5f6-4c8c-81b6-9889d79b0925" (UID: "e9f02af3-d5f6-4c8c-81b6-9889d79b0925"). InnerVolumeSpecName "metrics-certs-tls-certs". 
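
The cinder-scheduler-0 mount failures just above complete a pattern running through this whole burst: CreateContainerConfigError and MountVolume.SetUp errors that all reduce to Secrets and ConfigMaps (cinder-scripts, cinder-config-data, the various *-db-secret objects) being deleted before the pods that consume them. While a namespace is in that state, the gap can be listed directly; a rough sketch:

  #!/bin/bash
  # Secrets a pod references via volumes vs. secrets still present.
  ns=openstack pod=cinder-scheduler-0
  oc -n "$ns" get pod "$pod" \
    -o jsonpath='{range .spec.volumes[*]}{.secret.secretName}{"\n"}{end}' \
    | grep . | sort -u > /tmp/referenced
  oc -n "$ns" get secrets -o name | sed 's|^secret/||' | sort > /tmp/present
  comm -23 /tmp/referenced /tmp/present   # referenced but missing
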
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.293037 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1245-account-create-update-k2kgr"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.294061 4816 scope.go:117] "RemoveContainer" containerID="7796a4222d77debe25c1ebfb5ba503cfca8c10695560b5942ca5b18381233000" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.300720 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.322942 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-nb\") pod \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.323240 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-config\") pod \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.323300 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-sb\") pod \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.323334 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-svc\") pod \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.323392 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-swift-storage-0\") pod \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.323416 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjzck\" (UniqueName: \"kubernetes.io/projected/a2ea4453-2b42-409d-bac8-b317e43dcf6a-kube-api-access-bjzck\") pod \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\" (UID: \"a2ea4453-2b42-409d-bac8-b317e43dcf6a\") " Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.323781 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f02af3-d5f6-4c8c-81b6-9889d79b0925-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.330825 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2ea4453-2b42-409d-bac8-b317e43dcf6a-kube-api-access-bjzck" (OuterVolumeSpecName: "kube-api-access-bjzck") pod "a2ea4453-2b42-409d-bac8-b317e43dcf6a" (UID: "a2ea4453-2b42-409d-bac8-b317e43dcf6a"). InnerVolumeSpecName "kube-api-access-bjzck". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.346327 4816 scope.go:117] "RemoveContainer" containerID="ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.348580 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.348796 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="b266490b-4d0a-4463-8818-2bcdc39cdf88" containerName="nova-cell0-conductor-conductor" containerID="cri-o://61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" gracePeriod=30 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.384945 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a2ea4453-2b42-409d-bac8-b317e43dcf6a" (UID: "a2ea4453-2b42-409d-bac8-b317e43dcf6a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.392755 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-28k78"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.400583 4816 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/nova-cell1-conductor-0" secret="" err="secret \"nova-nova-dockercfg-mrtw9\" not found" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.409571 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a2ea4453-2b42-409d-bac8-b317e43dcf6a" (UID: "a2ea4453-2b42-409d-bac8-b317e43dcf6a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.427604 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.427640 4816 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.427652 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjzck\" (UniqueName: \"kubernetes.io/projected/a2ea4453-2b42-409d-bac8-b317e43dcf6a-kube-api-access-bjzck\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.427995 4816 secret.go:188] Couldn't get secret openstack/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.428125 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data podName:911fa155-35a3-49ee-9bc0-f10a8bac544d nodeName:}" failed. No retries permitted until 2026-02-16 13:26:29.928104571 +0000 UTC m=+1389.254818299 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data") pod "nova-cell1-conductor-0" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d") : secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.449291 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a2ea4453-2b42-409d-bac8-b317e43dcf6a" (UID: "a2ea4453-2b42-409d-bac8-b317e43dcf6a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.508581 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-mvvwk" podStartSLOduration=4.508564218 podStartE2EDuration="4.508564218s" podCreationTimestamp="2026-02-16 13:26:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 13:26:28.868112447 +0000 UTC m=+1388.194826175" watchObservedRunningTime="2026-02-16 13:26:29.508564218 +0000 UTC m=+1388.835277946" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.515647 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="119a94e7-cb72-4388-b3f0-78d2de19889f" path="/var/lib/kubelet/pods/119a94e7-cb72-4388-b3f0-78d2de19889f/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.516570 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="138bffa0-61c1-4fee-951a-1f54bc811535" path="/var/lib/kubelet/pods/138bffa0-61c1-4fee-951a-1f54bc811535/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.517167 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e221f94-9c2b-4f98-bc3b-3342bc071e6c" path="/var/lib/kubelet/pods/2e221f94-9c2b-4f98-bc3b-3342bc071e6c/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.519436 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3892f567-eaba-40b3-ab11-f49a067ec298" path="/var/lib/kubelet/pods/3892f567-eaba-40b3-ab11-f49a067ec298/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.520464 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a2ea4453-2b42-409d-bac8-b317e43dcf6a" (UID: "a2ea4453-2b42-409d-bac8-b317e43dcf6a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.520720 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58201649-b6df-4c32-a1c2-b672eefca745" path="/var/lib/kubelet/pods/58201649-b6df-4c32-a1c2-b672eefca745/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.521680 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="778f3898-c77a-4905-9c88-4f0222c75817" path="/var/lib/kubelet/pods/778f3898-c77a-4905-9c88-4f0222c75817/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.522789 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7aca2c9-78a1-48a0-b26c-b19a546eeeba" path="/var/lib/kubelet/pods/b7aca2c9-78a1-48a0-b26c-b19a546eeeba/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.523469 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdb6e3b2-f49c-4d96-81e7-3fc6c360425f" path="/var/lib/kubelet/pods/bdb6e3b2-f49c-4d96-81e7-3fc6c360425f/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.525281 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c13d2f68-a630-4bfc-a909-0c83418e55bc" path="/var/lib/kubelet/pods/c13d2f68-a630-4bfc-a909-0c83418e55bc/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.526375 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cca53be3-2b0f-4523-8fc4-d992bf72a13c" path="/var/lib/kubelet/pods/cca53be3-2b0f-4523-8fc4-d992bf72a13c/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.526932 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da21e294-2b36-4937-80ec-15c429fe6be8" path="/var/lib/kubelet/pods/da21e294-2b36-4937-80ec-15c429fe6be8/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.527492 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eac035cf-66af-4b1a-9798-8e7a05975c7e" path="/var/lib/kubelet/pods/eac035cf-66af-4b1a-9798-8e7a05975c7e/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.528094 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0d98c91-1333-411c-9307-90e0efd8005b" path="/var/lib/kubelet/pods/f0d98c91-1333-411c-9307-90e0efd8005b/volumes" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.529624 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.529641 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.532478 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-config" (OuterVolumeSpecName: "config") pod "a2ea4453-2b42-409d-bac8-b317e43dcf6a" (UID: "a2ea4453-2b42-409d-bac8-b317e43dcf6a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.631413 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ea4453-2b42-409d-bac8-b317e43dcf6a-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.631708 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.631788 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data podName:ecfcee51-c740-477a-87d9-558fffc58686 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:31.631768293 +0000 UTC m=+1390.958482021 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data") pod "rabbitmq-cell1-server-0" (UID: "ecfcee51-c740-477a-87d9-558fffc58686") : configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711361 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-28k78"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711408 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6847-account-create-update-vdkv9"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711423 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46pcb"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711435 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-46pcb"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711449 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711465 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-jsr6m"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711481 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711490 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711503 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-89f5bdcc-rdr9p"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.711723 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-89f5bdcc-rdr9p" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-httpd" containerID="cri-o://a8e282eef394bdb8b3559f783af4f640c6a7bb4f9d7e1a7ac0a8e8e3c4b0bafb" gracePeriod=30 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.712136 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-89f5bdcc-rdr9p" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-server" containerID="cri-o://b1b0de881416dfc3b1efa9b3cceea64ff96d9d1db02f7c7b5b1c9240e3757b44" gracePeriod=30 Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.782479 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-qxznq"] Feb 16 13:26:29 crc 
kubenswrapper[4816]: I0216 13:26:29.791189 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-qxznq"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.799377 4816 scope.go:117] "RemoveContainer" containerID="ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786" Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.800079 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786\": container with ID starting with ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786 not found: ID does not exist" containerID="ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.800131 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786"} err="failed to get container status \"ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786\": rpc error: code = NotFound desc = could not find container \"ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786\": container with ID starting with ae761944d290d2b180230f617a992477a61e362dafb51ab8c878c0aa0b240786 not found: ID does not exist" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.800163 4816 scope.go:117] "RemoveContainer" containerID="928d717d6d418e23ea08ee966cb3977d296a5c9fa2ddbce5816ef4e5e13e37d9" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.818071 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.825501 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.828218 4816 scope.go:117] "RemoveContainer" containerID="6e64aee290d85b9d87d4f4f3be6dbde686eecaeb8eb080e46016bd90348ba633" Feb 16 13:26:29 crc kubenswrapper[4816]: I0216 13:26:29.913733 4816 scope.go:117] "RemoveContainer" containerID="cad7db43bacb78d56110ccb724697599cb15ce0faf3ee895f3eca64fe020fab6" Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.945868 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.945956 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data podName:9eb39773-46a3-4f31-a95a-64a183dbe417 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:33.945938724 +0000 UTC m=+1393.272652452 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data") pod "rabbitmq-server-0" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417") : configmap "rabbitmq-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.946221 4816 secret.go:188] Couldn't get secret openstack/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:29 crc kubenswrapper[4816]: E0216 13:26:29.946307 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data podName:911fa155-35a3-49ee-9bc0-f10a8bac544d nodeName:}" failed. 
No retries permitted until 2026-02-16 13:26:30.946284614 +0000 UTC m=+1390.272998382 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data") pod "nova-cell1-conductor-0" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d") : secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.006715 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.046090 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.046801 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-vencrypt-tls-certs\") pod \"6ba4481b-c2a7-4156-b054-8179b24cdb66\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.046886 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-config-data\") pod \"6ba4481b-c2a7-4156-b054-8179b24cdb66\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.047000 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwr6x\" (UniqueName: \"kubernetes.io/projected/6ba4481b-c2a7-4156-b054-8179b24cdb66-kube-api-access-vwr6x\") pod \"6ba4481b-c2a7-4156-b054-8179b24cdb66\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.047477 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-combined-ca-bundle\") pod \"6ba4481b-c2a7-4156-b054-8179b24cdb66\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.047564 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-nova-novncproxy-tls-certs\") pod \"6ba4481b-c2a7-4156-b054-8179b24cdb66\" (UID: \"6ba4481b-c2a7-4156-b054-8179b24cdb66\") " Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.049591 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.053016 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 
13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.053081 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="b266490b-4d0a-4463-8818-2bcdc39cdf88" containerName="nova-cell0-conductor-conductor" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.053357 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ba4481b-c2a7-4156-b054-8179b24cdb66-kube-api-access-vwr6x" (OuterVolumeSpecName: "kube-api-access-vwr6x") pod "6ba4481b-c2a7-4156-b054-8179b24cdb66" (UID: "6ba4481b-c2a7-4156-b054-8179b24cdb66"). InnerVolumeSpecName "kube-api-access-vwr6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.080637 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-config-data" (OuterVolumeSpecName: "config-data") pod "6ba4481b-c2a7-4156-b054-8179b24cdb66" (UID: "6ba4481b-c2a7-4156-b054-8179b24cdb66"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.088620 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e623-account-create-update-9hfsw" event={"ID":"2576d56f-8296-4693-911b-20b6814cd8aa","Type":"ContainerStarted","Data":"7c17e5d810c5adc1b927d242236b8cf3052cda0bb3ec3ddabdb1c507526c35b3"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.095048 4816 generic.go:334] "Generic (PLEG): container finished" podID="27fde082-22f6-49b6-9750-796875a2fe49" containerID="bc1beb983a2e186cb5db4c2a6fcde47de90c5bb66dbd822cf870630ece875a2b" exitCode=143 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.095099 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27fde082-22f6-49b6-9750-796875a2fe49","Type":"ContainerDied","Data":"bc1beb983a2e186cb5db4c2a6fcde47de90c5bb66dbd822cf870630ece875a2b"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.101918 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "6ba4481b-c2a7-4156-b054-8179b24cdb66" (UID: "6ba4481b-c2a7-4156-b054-8179b24cdb66"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.102084 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6847-account-create-update-vdkv9" event={"ID":"deed1b26-cbe9-476b-8cc3-9898c6ad929f","Type":"ContainerStarted","Data":"777f88d4c509fcbb7197f145ca1d4e75406941ab3c4c287ad04d323c28ffbdba"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.105112 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ba4481b-c2a7-4156-b054-8179b24cdb66" (UID: "6ba4481b-c2a7-4156-b054-8179b24cdb66"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.119371 4816 generic.go:334] "Generic (PLEG): container finished" podID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerID="b0b6d2f42d6bfd99410fd4fbcca38774a4fbd1a10e9d1373d8a5aa64dbbd9e0a" exitCode=143 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.119437 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-757c679767-6dfp5" event={"ID":"585ce61c-bb97-4b2c-bea8-c55d06e6db79","Type":"ContainerDied","Data":"b0b6d2f42d6bfd99410fd4fbcca38774a4fbd1a10e9d1373d8a5aa64dbbd9e0a"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.137959 4816 generic.go:334] "Generic (PLEG): container finished" podID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerID="4960b5e9a538e65382b2f5a58519a9af1c316bd816511a7121ed68a138e98322" exitCode=143 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.138067 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"892fbdbb-3c72-45c9-8987-4bd9a01ddf98","Type":"ContainerDied","Data":"4960b5e9a538e65382b2f5a58519a9af1c316bd816511a7121ed68a138e98322"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.142679 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" event={"ID":"574f06c2-d10b-4b4c-b047-16f0bec03b42","Type":"ContainerStarted","Data":"debc775f5e9c7a4e41423feb0ca6afbf3eb5ef490cac1d3e038d264ddb0fbeb5"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.150077 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwr6x\" (UniqueName: \"kubernetes.io/projected/6ba4481b-c2a7-4156-b054-8179b24cdb66-kube-api-access-vwr6x\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.150104 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.150114 4816 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.150123 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.153458 4816 generic.go:334] "Generic (PLEG): container finished" podID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerID="c3ee070672541f2475c7a5b84908e9db321ca75bcdb3b2238999b588eeb68da3" exitCode=143 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.153513 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b75bc5544-lb94h" event={"ID":"592c5e3a-06b6-4b36-8281-c5c49051c945","Type":"ContainerDied","Data":"c3ee070672541f2475c7a5b84908e9db321ca75bcdb3b2238999b588eeb68da3"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.158116 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1245-account-create-update-k2kgr" event={"ID":"04914705-9b32-4bae-a7d7-e5bcc15337ac","Type":"ContainerStarted","Data":"2112000ac94082fe250ef78be81cc20c5fbf9568d57ece42c6fbafda5ad133fd"} Feb 16 13:26:30 crc kubenswrapper[4816]: 
E0216 13:26:30.181189 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.205805 4816 generic.go:334] "Generic (PLEG): container finished" podID="6ba4481b-c2a7-4156-b054-8179b24cdb66" containerID="227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3" exitCode=0 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.205880 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6ba4481b-c2a7-4156-b054-8179b24cdb66","Type":"ContainerDied","Data":"227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.205911 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6ba4481b-c2a7-4156-b054-8179b24cdb66","Type":"ContainerDied","Data":"6fda7aa0a6da8f35a801453e464e8ef6d330f708295fe46fc3a74f81ddc9a2cb"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.205930 4816 scope.go:117] "RemoveContainer" containerID="227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.206037 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.217840 4816 generic.go:334] "Generic (PLEG): container finished" podID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerID="934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e" exitCode=0 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.217916 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"59470ba6-bdc1-455a-abeb-f0757dcba5f6","Type":"ContainerDied","Data":"934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.218083 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "6ba4481b-c2a7-4156-b054-8179b24cdb66" (UID: "6ba4481b-c2a7-4156-b054-8179b24cdb66"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.230488 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.231939 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" event={"ID":"a2ea4453-2b42-409d-bac8-b317e43dcf6a","Type":"ContainerDied","Data":"15cb8fe97ee36652a0b3cda7e7ab73a13b64e8f54bb9c313592b37634f035337"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.232048 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-smv6j" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.240704 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.240763 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7554f0b7-4174-4950-ab00-aa21ecf64b56" containerName="nova-scheduler-scheduler" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.254085 4816 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ba4481b-c2a7-4156-b054-8179b24cdb66-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.294844 4816 generic.go:334] "Generic (PLEG): container finished" podID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerID="6102f7b1a321f650a3c7a67437394cba867978c4cd1d364e1858648afad1d022" exitCode=1 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.294935 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.295980 4816 scope.go:117] "RemoveContainer" containerID="6102f7b1a321f650a3c7a67437394cba867978c4cd1d364e1858648afad1d022" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.296425 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mvvwk" event={"ID":"161ac06a-fdce-4a22-b21c-d9a297bf4142","Type":"ContainerDied","Data":"6102f7b1a321f650a3c7a67437394cba867978c4cd1d364e1858648afad1d022"} Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.296455 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-smv6j"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.296684 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="911fa155-35a3-49ee-9bc0-f10a8bac544d" containerName="nova-cell1-conductor-conductor" containerID="cri-o://67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" gracePeriod=30 Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.305461 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-smv6j"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.308483 4816 scope.go:117] "RemoveContainer" containerID="227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.309431 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3\": container with ID starting with 227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3 not found: ID does not exist" containerID="227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.309472 4816 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3"} err="failed to get container status \"227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3\": rpc error: code = NotFound desc = could not find container \"227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3\": container with ID starting with 227b7999f7f5245225c7500123f15076d0f7c2c929244d29721a4cee5afee7a3 not found: ID does not exist" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.309498 4816 scope.go:117] "RemoveContainer" containerID="dbd428f503f966eeb3226bb68ba1f69e1b42e9b75e71221255c203cb87e57a4a" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.310714 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.360714 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.379792 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.384018 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-89f5bdcc-rdr9p" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.168:8080/healthcheck\": dial tcp 10.217.0.168:8080: connect: connection refused" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.384144 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-89f5bdcc-rdr9p" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.168:8080/healthcheck\": dial tcp 10.217.0.168:8080: connect: connection refused" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.398366 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.398423 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="911fa155-35a3-49ee-9bc0-f10a8bac544d" containerName="nova-cell1-conductor-conductor" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.403893 4816 scope.go:117] "RemoveContainer" containerID="d43de8d62d0459a351f147af09bf04360bdb291a156206a415357ae792d24702" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.675314 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.675767 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrcqk\" (UniqueName: \"kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk\") pod \"nova-cell1-c4fc-account-create-update-bcx8r\" (UID: \"ddbe806c-23ac-4f2f-87e1-be1ec2189c87\") " pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.676868 4816 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.676927 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:34.676908799 +0000 UTC m=+1394.003622537 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : configmap "openstack-cell1-scripts" not found Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.681932 4816 projected.go:194] Error preparing data for projected volume kube-api-access-jrcqk for pod openstack/nova-cell1-c4fc-account-create-update-bcx8r: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.682098 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk podName:ddbe806c-23ac-4f2f-87e1-be1ec2189c87 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:34.68207836 +0000 UTC m=+1394.008792088 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-jrcqk" (UniqueName: "kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk") pod "nova-cell1-c4fc-account-create-update-bcx8r" (UID: "ddbe806c-23ac-4f2f-87e1-be1ec2189c87") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.783702 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.812757 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.823425 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.823899 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.838427 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.867265 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.879223 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2576d56f-8296-4693-911b-20b6814cd8aa-operator-scripts\") pod \"2576d56f-8296-4693-911b-20b6814cd8aa\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.879284 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07be83ef-77e3-4327-bcbd-ed16e9a92e40-operator-scripts\") pod \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.879343 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw5l8\" (UniqueName: \"kubernetes.io/projected/07be83ef-77e3-4327-bcbd-ed16e9a92e40-kube-api-access-gw5l8\") pod \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\" (UID: \"07be83ef-77e3-4327-bcbd-ed16e9a92e40\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.879400 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deed1b26-cbe9-476b-8cc3-9898c6ad929f-operator-scripts\") pod \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.879940 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07be83ef-77e3-4327-bcbd-ed16e9a92e40-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "07be83ef-77e3-4327-bcbd-ed16e9a92e40" (UID: "07be83ef-77e3-4327-bcbd-ed16e9a92e40"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.879951 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2576d56f-8296-4693-911b-20b6814cd8aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2576d56f-8296-4693-911b-20b6814cd8aa" (UID: "2576d56f-8296-4693-911b-20b6814cd8aa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.879971 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/deed1b26-cbe9-476b-8cc3-9898c6ad929f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "deed1b26-cbe9-476b-8cc3-9898c6ad929f" (UID: "deed1b26-cbe9-476b-8cc3-9898c6ad929f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.880180 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6n2p\" (UniqueName: \"kubernetes.io/projected/2576d56f-8296-4693-911b-20b6814cd8aa-kube-api-access-j6n2p\") pod \"2576d56f-8296-4693-911b-20b6814cd8aa\" (UID: \"2576d56f-8296-4693-911b-20b6814cd8aa\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.880262 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqqvh\" (UniqueName: \"kubernetes.io/projected/deed1b26-cbe9-476b-8cc3-9898c6ad929f-kube-api-access-bqqvh\") pod \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\" (UID: \"deed1b26-cbe9-476b-8cc3-9898c6ad929f\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.880935 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2576d56f-8296-4693-911b-20b6814cd8aa-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.880958 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07be83ef-77e3-4327-bcbd-ed16e9a92e40-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.880970 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deed1b26-cbe9-476b-8cc3-9898c6ad929f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.888607 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.909939 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2576d56f-8296-4693-911b-20b6814cd8aa-kube-api-access-j6n2p" (OuterVolumeSpecName: "kube-api-access-j6n2p") pod "2576d56f-8296-4693-911b-20b6814cd8aa" (UID: "2576d56f-8296-4693-911b-20b6814cd8aa"). InnerVolumeSpecName "kube-api-access-j6n2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.910480 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07be83ef-77e3-4327-bcbd-ed16e9a92e40-kube-api-access-gw5l8" (OuterVolumeSpecName: "kube-api-access-gw5l8") pod "07be83ef-77e3-4327-bcbd-ed16e9a92e40" (UID: "07be83ef-77e3-4327-bcbd-ed16e9a92e40"). InnerVolumeSpecName "kube-api-access-gw5l8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.913420 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deed1b26-cbe9-476b-8cc3-9898c6ad929f-kube-api-access-bqqvh" (OuterVolumeSpecName: "kube-api-access-bqqvh") pod "deed1b26-cbe9-476b-8cc3-9898c6ad929f" (UID: "deed1b26-cbe9-476b-8cc3-9898c6ad929f"). InnerVolumeSpecName "kube-api-access-bqqvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.945432 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.971943 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982491 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982747 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/574f06c2-d10b-4b4c-b047-16f0bec03b42-operator-scripts\") pod \"574f06c2-d10b-4b4c-b047-16f0bec03b42\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982809 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-generated\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982866 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04914705-9b32-4bae-a7d7-e5bcc15337ac-operator-scripts\") pod \"04914705-9b32-4bae-a7d7-e5bcc15337ac\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982896 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c97xh\" (UniqueName: \"kubernetes.io/projected/04914705-9b32-4bae-a7d7-e5bcc15337ac-kube-api-access-c97xh\") pod \"04914705-9b32-4bae-a7d7-e5bcc15337ac\" (UID: \"04914705-9b32-4bae-a7d7-e5bcc15337ac\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982920 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982951 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxhl2\" (UniqueName: \"kubernetes.io/projected/8cdb34b1-893f-4701-89b2-195db5c6c03b-kube-api-access-hxhl2\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.982988 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nczk\" (UniqueName: \"kubernetes.io/projected/574f06c2-d10b-4b4c-b047-16f0bec03b42-kube-api-access-2nczk\") pod \"574f06c2-d10b-4b4c-b047-16f0bec03b42\" (UID: \"574f06c2-d10b-4b4c-b047-16f0bec03b42\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.983028 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-default\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.983081 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-operator-scripts\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.983137 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-kolla-config\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.983162 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-galera-tls-certs\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.983728 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6n2p\" (UniqueName: \"kubernetes.io/projected/2576d56f-8296-4693-911b-20b6814cd8aa-kube-api-access-j6n2p\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.983745 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqqvh\" (UniqueName: \"kubernetes.io/projected/deed1b26-cbe9-476b-8cc3-9898c6ad929f-kube-api-access-bqqvh\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.983760 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw5l8\" (UniqueName: \"kubernetes.io/projected/07be83ef-77e3-4327-bcbd-ed16e9a92e40-kube-api-access-gw5l8\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.983842 4816 secret.go:188] Couldn't get secret openstack/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.984436 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04914705-9b32-4bae-a7d7-e5bcc15337ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "04914705-9b32-4bae-a7d7-e5bcc15337ac" (UID: "04914705-9b32-4bae-a7d7-e5bcc15337ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: E0216 13:26:30.984568 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data podName:911fa155-35a3-49ee-9bc0-f10a8bac544d nodeName:}" failed. No retries permitted until 2026-02-16 13:26:32.984546081 +0000 UTC m=+1392.311259829 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data") pod "nova-cell1-conductor-0" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d") : secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.984732 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.984781 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.984906 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/574f06c2-d10b-4b4c-b047-16f0bec03b42-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "574f06c2-d10b-4b4c-b047-16f0bec03b42" (UID: "574f06c2-d10b-4b4c-b047-16f0bec03b42"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.985481 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.985743 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.991980 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cdb34b1-893f-4701-89b2-195db5c6c03b-kube-api-access-hxhl2" (OuterVolumeSpecName: "kube-api-access-hxhl2") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "kube-api-access-hxhl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:30 crc kubenswrapper[4816]: I0216 13:26:30.999827 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04914705-9b32-4bae-a7d7-e5bcc15337ac-kube-api-access-c97xh" (OuterVolumeSpecName: "kube-api-access-c97xh") pod "04914705-9b32-4bae-a7d7-e5bcc15337ac" (UID: "04914705-9b32-4bae-a7d7-e5bcc15337ac"). InnerVolumeSpecName "kube-api-access-c97xh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.000722 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/574f06c2-d10b-4b4c-b047-16f0bec03b42-kube-api-access-2nczk" (OuterVolumeSpecName: "kube-api-access-2nczk") pod "574f06c2-d10b-4b4c-b047-16f0bec03b42" (UID: "574f06c2-d10b-4b4c-b047-16f0bec03b42"). InnerVolumeSpecName "kube-api-access-2nczk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.006396 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.014679 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.165:8776/healthcheck\": read tcp 10.217.0.2:44522->10.217.0.165:8776: read: connection reset by peer" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.027130 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.085389 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.085180 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3af0ec9-22bb-4119-b349-bef284903316-operator-scripts\") pod \"e3af0ec9-22bb-4119-b349-bef284903316\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.085628 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcx8g\" (UniqueName: \"kubernetes.io/projected/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-kube-api-access-jcx8g\") pod \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.085706 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3af0ec9-22bb-4119-b349-bef284903316-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e3af0ec9-22bb-4119-b349-bef284903316" (UID: "e3af0ec9-22bb-4119-b349-bef284903316"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.085712 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct8p5\" (UniqueName: \"kubernetes.io/projected/e3af0ec9-22bb-4119-b349-bef284903316-kube-api-access-ct8p5\") pod \"e3af0ec9-22bb-4119-b349-bef284903316\" (UID: \"e3af0ec9-22bb-4119-b349-bef284903316\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.085764 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle\") pod \"8cdb34b1-893f-4701-89b2-195db5c6c03b\" (UID: \"8cdb34b1-893f-4701-89b2-195db5c6c03b\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.085886 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-operator-scripts\") pod \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\" (UID: \"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087078 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/574f06c2-d10b-4b4c-b047-16f0bec03b42-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087114 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-generated\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087128 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04914705-9b32-4bae-a7d7-e5bcc15337ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087139 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c97xh\" (UniqueName: \"kubernetes.io/projected/04914705-9b32-4bae-a7d7-e5bcc15337ac-kube-api-access-c97xh\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087168 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087180 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxhl2\" (UniqueName: \"kubernetes.io/projected/8cdb34b1-893f-4701-89b2-195db5c6c03b-kube-api-access-hxhl2\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087192 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nczk\" (UniqueName: \"kubernetes.io/projected/574f06c2-d10b-4b4c-b047-16f0bec03b42-kube-api-access-2nczk\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087208 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-config-data-default\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087220 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087232 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3af0ec9-22bb-4119-b349-bef284903316-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.087242 4816 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8cdb34b1-893f-4701-89b2-195db5c6c03b-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.089355 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bfa5bec4-12b0-4788-a9d2-4dc39afd56e3" (UID: "bfa5bec4-12b0-4788-a9d2-4dc39afd56e3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: W0216 13:26:31.089423 4816 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/8cdb34b1-893f-4701-89b2-195db5c6c03b/volumes/kubernetes.io~secret/combined-ca-bundle Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.089430 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.102807 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-kube-api-access-jcx8g" (OuterVolumeSpecName: "kube-api-access-jcx8g") pod "bfa5bec4-12b0-4788-a9d2-4dc39afd56e3" (UID: "bfa5bec4-12b0-4788-a9d2-4dc39afd56e3"). InnerVolumeSpecName "kube-api-access-jcx8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.102910 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3af0ec9-22bb-4119-b349-bef284903316-kube-api-access-ct8p5" (OuterVolumeSpecName: "kube-api-access-ct8p5") pod "e3af0ec9-22bb-4119-b349-bef284903316" (UID: "e3af0ec9-22bb-4119-b349-bef284903316"). InnerVolumeSpecName "kube-api-access-ct8p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.109062 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.130745 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "8cdb34b1-893f-4701-89b2-195db5c6c03b" (UID: "8cdb34b1-893f-4701-89b2-195db5c6c03b"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.233522 4816 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.233547 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcx8g\" (UniqueName: \"kubernetes.io/projected/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-kube-api-access-jcx8g\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.233556 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cdb34b1-893f-4701-89b2-195db5c6c03b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.233565 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct8p5\" (UniqueName: \"kubernetes.io/projected/e3af0ec9-22bb-4119-b349-bef284903316-kube-api-access-ct8p5\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.233574 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.233583 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.233629 4816 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.233680 4816 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.233722 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:35.233700256 +0000 UTC m=+1394.560414094 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-config-data" not found Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.233748 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:35.233736517 +0000 UTC m=+1394.560450365 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scripts" not found Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.234824 4816 secret.go:188] Couldn't get secret openstack/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.234922 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:35.23490301 +0000 UTC m=+1394.561616828 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scheduler-config-data" not found Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.329226 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.329505 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-central-agent" containerID="cri-o://f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c" gracePeriod=30 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.330005 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="proxy-httpd" containerID="cri-o://daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552" gracePeriod=30 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.330066 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="sg-core" containerID="cri-o://d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c" gracePeriod=30 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.330110 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-notification-agent" containerID="cri-o://f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08" gracePeriod=30 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.356962 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.357160 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="28ab0c0d-5c1e-403d-a3d9-234a5c723884" containerName="kube-state-metrics" containerID="cri-o://eaecabde41b4e021829e9c54ac76b1d41288afa9788f12cfc4efc87303bf69b9" gracePeriod=30 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.429586 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-7489-account-create-update-rr6kg" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.442920 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" path="/var/lib/kubelet/pods/0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279/volumes" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.444576 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25fd1ebc-4f40-4a30-8685-05b050cca498" path="/var/lib/kubelet/pods/25fd1ebc-4f40-4a30-8685-05b050cca498/volumes" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.445554 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.445901 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ba4481b-c2a7-4156-b054-8179b24cdb66" path="/var/lib/kubelet/pods/6ba4481b-c2a7-4156-b054-8179b24cdb66/volumes" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.451571 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" path="/var/lib/kubelet/pods/7d49f257-3900-43a9-b4c2-353ceeeeea88/volumes" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.452363 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" path="/var/lib/kubelet/pods/a2ea4453-2b42-409d-bac8-b317e43dcf6a/volumes" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.453732 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c300f0d3-72e0-46f6-9910-9dda53a08e13" path="/var/lib/kubelet/pods/c300f0d3-72e0-46f6-9910-9dda53a08e13/volumes" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.455084 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9f02af3-d5f6-4c8c-81b6-9889d79b0925" path="/var/lib/kubelet/pods/e9f02af3-d5f6-4c8c-81b6-9889d79b0925/volumes" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.458326 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-7489-account-create-update-rr6kg" event={"ID":"bfa5bec4-12b0-4788-a9d2-4dc39afd56e3","Type":"ContainerDied","Data":"6ce176b5df0e2b72fe7e1c6fa3706f50b4b7c9967bd8a35b15d2b449628cbf7d"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.473998 4816 generic.go:334] "Generic (PLEG): container finished" podID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerID="b3abd28222842a555834426e0436b1d3f06efdd34b364c03292e465a51f57ba1" exitCode=1 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.474100 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mvvwk" event={"ID":"161ac06a-fdce-4a22-b21c-d9a297bf4142","Type":"ContainerDied","Data":"b3abd28222842a555834426e0436b1d3f06efdd34b364c03292e465a51f57ba1"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.474140 4816 scope.go:117] "RemoveContainer" containerID="6102f7b1a321f650a3c7a67437394cba867978c4cd1d364e1858648afad1d022" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.474833 4816 scope.go:117] "RemoveContainer" containerID="b3abd28222842a555834426e0436b1d3f06efdd34b364c03292e465a51f57ba1" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.475100 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed 
container=mariadb-account-create-update pod=root-account-create-update-mvvwk_openstack(161ac06a-fdce-4a22-b21c-d9a297bf4142)\"" pod="openstack/root-account-create-update-mvvwk" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.511741 4816 generic.go:334] "Generic (PLEG): container finished" podID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerID="e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8" exitCode=0 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.511821 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8cdb34b1-893f-4701-89b2-195db5c6c03b","Type":"ContainerDied","Data":"e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.511845 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8cdb34b1-893f-4701-89b2-195db5c6c03b","Type":"ContainerDied","Data":"2b6455348f4b61d063a9f3c519fcce891590ef1ac0f4dc572d0e2ce5be92cb8f"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.511925 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.520595 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-fc34-account-create-update-q25nr" event={"ID":"07be83ef-77e3-4327-bcbd-ed16e9a92e40","Type":"ContainerDied","Data":"f9a4ddb48db5608b5b0664eaa0475f1c311057a3d11b7a67c140a569e435be78"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.520799 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-fc34-account-create-update-q25nr" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.530162 4816 generic.go:334] "Generic (PLEG): container finished" podID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerID="2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3" exitCode=0 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.530221 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" event={"ID":"ff0d5c9b-ff09-43bf-977f-e69533c63966","Type":"ContainerDied","Data":"2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.530247 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" event={"ID":"ff0d5c9b-ff09-43bf-977f-e69533c63966","Type":"ContainerDied","Data":"62340f1d587df0812585af4125da416fa66e04d90db26a84d8ca70065bbc5a4d"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.530302 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-cbf6d8974-7ddwq" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.536989 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e623-account-create-update-9hfsw" event={"ID":"2576d56f-8296-4693-911b-20b6814cd8aa","Type":"ContainerDied","Data":"7c17e5d810c5adc1b927d242236b8cf3052cda0bb3ec3ddabdb1c507526c35b3"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.537081 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e623-account-create-update-9hfsw" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.561616 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data-custom\") pod \"ff0d5c9b-ff09-43bf-977f-e69533c63966\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.561767 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d5c9b-ff09-43bf-977f-e69533c63966-logs\") pod \"ff0d5c9b-ff09-43bf-977f-e69533c63966\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.561956 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v54q2\" (UniqueName: \"kubernetes.io/projected/ff0d5c9b-ff09-43bf-977f-e69533c63966-kube-api-access-v54q2\") pod \"ff0d5c9b-ff09-43bf-977f-e69533c63966\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.561997 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data\") pod \"ff0d5c9b-ff09-43bf-977f-e69533c63966\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.562030 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-combined-ca-bundle\") pod \"ff0d5c9b-ff09-43bf-977f-e69533c63966\" (UID: \"ff0d5c9b-ff09-43bf-977f-e69533c63966\") " Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.564697 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff0d5c9b-ff09-43bf-977f-e69533c63966-logs" (OuterVolumeSpecName: "logs") pod "ff0d5c9b-ff09-43bf-977f-e69533c63966" (UID: "ff0d5c9b-ff09-43bf-977f-e69533c63966"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.567165 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6847-account-create-update-vdkv9" event={"ID":"deed1b26-cbe9-476b-8cc3-9898c6ad929f","Type":"ContainerDied","Data":"777f88d4c509fcbb7197f145ca1d4e75406941ab3c4c287ad04d323c28ffbdba"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.567333 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6847-account-create-update-vdkv9" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.579879 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d5c9b-ff09-43bf-977f-e69533c63966-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.579878 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.580129 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" containerName="memcached" containerID="cri-o://d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6" gracePeriod=30 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.592925 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff0d5c9b-ff09-43bf-977f-e69533c63966-kube-api-access-v54q2" (OuterVolumeSpecName: "kube-api-access-v54q2") pod "ff0d5c9b-ff09-43bf-977f-e69533c63966" (UID: "ff0d5c9b-ff09-43bf-977f-e69533c63966"). InnerVolumeSpecName "kube-api-access-v54q2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.623553 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ff0d5c9b-ff09-43bf-977f-e69533c63966" (UID: "ff0d5c9b-ff09-43bf-977f-e69533c63966"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.627359 4816 generic.go:334] "Generic (PLEG): container finished" podID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerID="b4a948c900f30d0434262ce037301027d88ee90b747aed9a2dc7d36fc8c7b454" exitCode=0 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.627423 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"975a9d1d-44d0-4b11-8a41-8f237da1ad85","Type":"ContainerDied","Data":"b4a948c900f30d0434262ce037301027d88ee90b747aed9a2dc7d36fc8c7b454"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.640224 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8fa6-account-create-update-t2hcp"] Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.642823 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1657-account-create-update-jfrmp" event={"ID":"e3af0ec9-22bb-4119-b349-bef284903316","Type":"ContainerDied","Data":"20f4ce9e840959c8bd5187864004c9d2be92ac9805956704da3971fca1cd37ff"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.642940 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1657-account-create-update-jfrmp" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.673596 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-8fa6-account-create-update-t2hcp"] Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.682940 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v54q2\" (UniqueName: \"kubernetes.io/projected/ff0d5c9b-ff09-43bf-977f-e69533c63966-kube-api-access-v54q2\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.682978 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.683056 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.683117 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data podName:ecfcee51-c740-477a-87d9-558fffc58686 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:35.683099791 +0000 UTC m=+1395.009813509 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data") pod "rabbitmq-cell1-server-0" (UID: "ecfcee51-c740-477a-87d9-558fffc58686") : configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.708452 4816 generic.go:334] "Generic (PLEG): container finished" podID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerID="b1b0de881416dfc3b1efa9b3cceea64ff96d9d1db02f7c7b5b1c9240e3757b44" exitCode=0 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.708489 4816 generic.go:334] "Generic (PLEG): container finished" podID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerID="a8e282eef394bdb8b3559f783af4f640c6a7bb4f9d7e1a7ac0a8e8e3c4b0bafb" exitCode=0 Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.708576 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-89f5bdcc-rdr9p" event={"ID":"fd68bcb4-cb94-422a-b44a-7fd47d309f0a","Type":"ContainerDied","Data":"b1b0de881416dfc3b1efa9b3cceea64ff96d9d1db02f7c7b5b1c9240e3757b44"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.708612 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-89f5bdcc-rdr9p" event={"ID":"fd68bcb4-cb94-422a-b44a-7fd47d309f0a","Type":"ContainerDied","Data":"a8e282eef394bdb8b3559f783af4f640c6a7bb4f9d7e1a7ac0a8e8e3c4b0bafb"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.730865 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff0d5c9b-ff09-43bf-977f-e69533c63966" (UID: "ff0d5c9b-ff09-43bf-977f-e69533c63966"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.733121 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" event={"ID":"574f06c2-d10b-4b4c-b047-16f0bec03b42","Type":"ContainerDied","Data":"debc775f5e9c7a4e41423feb0ca6afbf3eb5ef490cac1d3e038d264ddb0fbeb5"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.733253 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d27b-account-create-update-jsr6m" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.735414 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c4fc-account-create-update-bcx8r" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.735598 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1245-account-create-update-k2kgr" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.735648 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1245-account-create-update-k2kgr" event={"ID":"04914705-9b32-4bae-a7d7-e5bcc15337ac","Type":"ContainerDied","Data":"2112000ac94082fe250ef78be81cc20c5fbf9568d57ece42c6fbafda5ad133fd"} Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.737927 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-8fa6-account-create-update-nb7z5"] Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738282 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerName="ovsdbserver-nb" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738300 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerName="ovsdbserver-nb" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738308 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ba4481b-c2a7-4156-b054-8179b24cdb66" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738315 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ba4481b-c2a7-4156-b054-8179b24cdb66" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738330 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738336 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738353 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9f02af3-d5f6-4c8c-81b6-9889d79b0925" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738359 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9f02af3-d5f6-4c8c-81b6-9889d79b0925" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738367 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738373 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" 
containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738380 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerName="dnsmasq-dns" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738386 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerName="dnsmasq-dns" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738396 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738401 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738416 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="ovsdbserver-sb" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738422 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="ovsdbserver-sb" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738432 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerName="mysql-bootstrap" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738439 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerName="mysql-bootstrap" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738447 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerName="galera" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738453 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerName="galera" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738467 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerName="init" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738472 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerName="init" Feb 16 13:26:31 crc kubenswrapper[4816]: E0216 13:26:31.738489 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener-log" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738495 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener-log" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738690 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerName="ovsdbserver-nb" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738717 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e895c6d-3b17-40a3-a5b7-b1a2ce1b5279" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738730 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2ea4453-2b42-409d-bac8-b317e43dcf6a" containerName="dnsmasq-dns" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738740 4816 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener-log" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738755 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9f02af3-d5f6-4c8c-81b6-9889d79b0925" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738766 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" containerName="barbican-keystone-listener" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738774 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="ovsdbserver-sb" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738784 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ba4481b-c2a7-4156-b054-8179b24cdb66" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738802 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdb34b1-893f-4701-89b2-195db5c6c03b" containerName="galera" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.738809 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d49f257-3900-43a9-b4c2-353ceeeeea88" containerName="openstack-network-exporter" Feb 16 13:26:31 crc kubenswrapper[4816]: I0216 13:26:31.739334 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.750594 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.757879 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8fa6-account-create-update-nb7z5"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.757922 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data" (OuterVolumeSpecName: "config-data") pod "ff0d5c9b-ff09-43bf-977f-e69533c63966" (UID: "ff0d5c9b-ff09-43bf-977f-e69533c63966"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.784280 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.784438 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdtxq\" (UniqueName: \"kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.784517 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.784528 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d5c9b-ff09-43bf-977f-e69533c63966-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.833455 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-ndrvk"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.853075 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-d27p4"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.868501 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-d27p4"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.886504 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdtxq\" (UniqueName: \"kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.886604 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:31.886772 4816 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:31.886820 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts podName:283d5223-0722-43a2-bf1c-0dd876d0313c nodeName:}" failed. No retries permitted until 2026-02-16 13:26:32.386805594 +0000 UTC m=+1391.713519322 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts") pod "keystone-8fa6-account-create-update-nb7z5" (UID: "283d5223-0722-43a2-bf1c-0dd876d0313c") : configmap "openstack-scripts" not found Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.887163 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-ndrvk"] Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:31.890540 4816 projected.go:194] Error preparing data for projected volume kube-api-access-gdtxq for pod openstack/keystone-8fa6-account-create-update-nb7z5: failed to fetch token: serviceaccounts "galera-openstack" not found Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:31.890614 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq podName:283d5223-0722-43a2-bf1c-0dd876d0313c nodeName:}" failed. No retries permitted until 2026-02-16 13:26:32.390591918 +0000 UTC m=+1391.717305646 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-gdtxq" (UniqueName: "kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq") pod "keystone-8fa6-account-create-update-nb7z5" (UID: "283d5223-0722-43a2-bf1c-0dd876d0313c") : failed to fetch token: serviceaccounts "galera-openstack" not found Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.896251 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5b47f74bcc-6q6s6"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.896462 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-5b47f74bcc-6q6s6" podUID="b7e01f36-29ae-4e7d-9dfb-c91c3f860060" containerName="keystone-api" containerID="cri-o://98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb" gracePeriod=30 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.910709 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.921781 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-nnjgp"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.930199 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-nnjgp"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.950731 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8fa6-account-create-update-nb7z5"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:31.965094 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-mvvwk"] Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.067606 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-v9w6q" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" probeResult="failure" output=< Feb 16 13:26:32 crc kubenswrapper[4816]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Feb 16 13:26:32 crc kubenswrapper[4816]: > Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.088234 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.088534 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.093892 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.098047 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.098254 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server" Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.109739 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.115810 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.115886 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.200499 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerName="galera" containerID="cri-o://9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29" gracePeriod=30 Feb 16 
13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.344145 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:51504->10.217.0.205:8775: read: connection reset by peer" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.344155 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:51492->10.217.0.205:8775: read: connection reset by peer" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.418476 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdtxq\" (UniqueName: \"kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.418627 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.418770 4816 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.418831 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts podName:283d5223-0722-43a2-bf1c-0dd876d0313c nodeName:}" failed. No retries permitted until 2026-02-16 13:26:33.418810185 +0000 UTC m=+1392.745523923 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts") pod "keystone-8fa6-account-create-update-nb7z5" (UID: "283d5223-0722-43a2-bf1c-0dd876d0313c") : configmap "openstack-scripts" not found Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.437523 4816 projected.go:194] Error preparing data for projected volume kube-api-access-gdtxq for pod openstack/keystone-8fa6-account-create-update-nb7z5: failed to fetch token: serviceaccounts "galera-openstack" not found Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.437588 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq podName:283d5223-0722-43a2-bf1c-0dd876d0313c nodeName:}" failed. No retries permitted until 2026-02-16 13:26:33.437568837 +0000 UTC m=+1392.764282565 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-gdtxq" (UniqueName: "kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq") pod "keystone-8fa6-account-create-update-nb7z5" (UID: "283d5223-0722-43a2-bf1c-0dd876d0313c") : failed to fetch token: serviceaccounts "galera-openstack" not found Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.761399 4816 generic.go:334] "Generic (PLEG): container finished" podID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerID="527ce40525cb7c9b030d6afba0202147d041220b639540ecbe06dabb3e1425e2" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.761568 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6c7838cc-1729-405a-af1e-ad0f1b9884f7","Type":"ContainerDied","Data":"527ce40525cb7c9b030d6afba0202147d041220b639540ecbe06dabb3e1425e2"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.761967 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6c7838cc-1729-405a-af1e-ad0f1b9884f7","Type":"ContainerDied","Data":"639ca0f5b315128b6710b73b225ce1d866d10ac9c9fcfbe69a22b955a1a3f8ff"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.761990 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="639ca0f5b315128b6710b73b225ce1d866d10ac9c9fcfbe69a22b955a1a3f8ff" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.766990 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"975a9d1d-44d0-4b11-8a41-8f237da1ad85","Type":"ContainerDied","Data":"5184f1623c127756f239d67fca1a99d1f2ef05617eb57a75de180cf64199386a"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.767047 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5184f1623c127756f239d67fca1a99d1f2ef05617eb57a75de180cf64199386a" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.769021 4816 generic.go:334] "Generic (PLEG): container finished" podID="28ab0c0d-5c1e-403d-a3d9-234a5c723884" containerID="eaecabde41b4e021829e9c54ac76b1d41288afa9788f12cfc4efc87303bf69b9" exitCode=2 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.769086 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28ab0c0d-5c1e-403d-a3d9-234a5c723884","Type":"ContainerDied","Data":"eaecabde41b4e021829e9c54ac76b1d41288afa9788f12cfc4efc87303bf69b9"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.769106 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28ab0c0d-5c1e-403d-a3d9-234a5c723884","Type":"ContainerDied","Data":"ce6cf04ac5ff535df9636eaa8fd53db8b87db6b4f6352539f42ea21e3840f097"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.769117 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce6cf04ac5ff535df9636eaa8fd53db8b87db6b4f6352539f42ea21e3840f097" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.775353 4816 generic.go:334] "Generic (PLEG): container finished" podID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerID="daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.775398 4816 generic.go:334] "Generic (PLEG): container finished" podID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerID="d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c" exitCode=2 Feb 16 13:26:32 
crc kubenswrapper[4816]: I0216 13:26:32.775407 4816 generic.go:334] "Generic (PLEG): container finished" podID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerID="f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.775437 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerDied","Data":"daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.775500 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerDied","Data":"d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.775513 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerDied","Data":"f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.778611 4816 generic.go:334] "Generic (PLEG): container finished" podID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerID="ed64af8ac2faddc8f5b3609993e7e85b7c02038ee89682aa306fb9d136d0c815" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.778711 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5ffd8b88f4-cqjcr" event={"ID":"26c49ecf-0c54-4aa7-893f-861370b1cdbd","Type":"ContainerDied","Data":"ed64af8ac2faddc8f5b3609993e7e85b7c02038ee89682aa306fb9d136d0c815"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.778741 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5ffd8b88f4-cqjcr" event={"ID":"26c49ecf-0c54-4aa7-893f-861370b1cdbd","Type":"ContainerDied","Data":"fcdf0757eb60c77b310e71e77c8516c1bd19a9d9fc2027399b6b712409b9e45a"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.778758 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcdf0757eb60c77b310e71e77c8516c1bd19a9d9fc2027399b6b712409b9e45a" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.780483 4816 generic.go:334] "Generic (PLEG): container finished" podID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerID="305b8eb6bcfac360528db193c73952f20605bc0004e0f5602cffb736efb9d9ec" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.780530 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b75bc5544-lb94h" event={"ID":"592c5e3a-06b6-4b36-8281-c5c49051c945","Type":"ContainerDied","Data":"305b8eb6bcfac360528db193c73952f20605bc0004e0f5602cffb736efb9d9ec"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.780553 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b75bc5544-lb94h" event={"ID":"592c5e3a-06b6-4b36-8281-c5c49051c945","Type":"ContainerDied","Data":"547356f04fa0f8348ea58d056613cf41a4d33f9c9ae01797839c445e1d6ff4ea"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.780564 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="547356f04fa0f8348ea58d056613cf41a4d33f9c9ae01797839c445e1d6ff4ea" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.783022 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-89f5bdcc-rdr9p" 
event={"ID":"fd68bcb4-cb94-422a-b44a-7fd47d309f0a","Type":"ContainerDied","Data":"6340a77ac99776aeb020b01976822fd26699f38ca39fa326120c029fa48e7a90"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.783049 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6340a77ac99776aeb020b01976822fd26699f38ca39fa326120c029fa48e7a90" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.785508 4816 generic.go:334] "Generic (PLEG): container finished" podID="27fde082-22f6-49b6-9750-796875a2fe49" containerID="0835c326e6522f8ce700ad606e2e71a3e72b02b2702bf969281105422fd2bf4b" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.785560 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27fde082-22f6-49b6-9750-796875a2fe49","Type":"ContainerDied","Data":"0835c326e6522f8ce700ad606e2e71a3e72b02b2702bf969281105422fd2bf4b"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.789767 4816 generic.go:334] "Generic (PLEG): container finished" podID="1940a629-51c3-4dca-a26d-02080dabbd68" containerID="b594b400e21605362a39b0644bd2c43537ea857aedc6e60fe673ee3964203cf8" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.789872 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1940a629-51c3-4dca-a26d-02080dabbd68","Type":"ContainerDied","Data":"b594b400e21605362a39b0644bd2c43537ea857aedc6e60fe673ee3964203cf8"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.789914 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1940a629-51c3-4dca-a26d-02080dabbd68","Type":"ContainerDied","Data":"dcf24c6ef294d5b0a3ea62a0e377b80d57f49354a4c6688afb006f072591e528"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.789927 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcf24c6ef294d5b0a3ea62a0e377b80d57f49354a4c6688afb006f072591e528" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.794012 4816 generic.go:334] "Generic (PLEG): container finished" podID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerID="83a666709d0398bcf18db5bff64d1c6fa8da80e779c24200130a17a483a8ae2b" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.794101 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-757c679767-6dfp5" event={"ID":"585ce61c-bb97-4b2c-bea8-c55d06e6db79","Type":"ContainerDied","Data":"83a666709d0398bcf18db5bff64d1c6fa8da80e779c24200130a17a483a8ae2b"} Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.796847 4816 generic.go:334] "Generic (PLEG): container finished" podID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerID="bdb9b742ad3735c49ae99cebe69749569e35206e6f4aa144a488ecb8a622f11d" exitCode=0 Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.797388 4816 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/root-account-create-update-mvvwk" secret="" err="secret \"galera-openstack-dockercfg-szgkj\" not found" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.797423 4816 scope.go:117] "RemoveContainer" containerID="b3abd28222842a555834426e0436b1d3f06efdd34b364c03292e465a51f57ba1" Feb 16 13:26:32 crc kubenswrapper[4816]: I0216 13:26:32.797858 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"892fbdbb-3c72-45c9-8987-4bd9a01ddf98","Type":"ContainerDied","Data":"bdb9b742ad3735c49ae99cebe69749569e35206e6f4aa144a488ecb8a622f11d"} Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.797888 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-mvvwk_openstack(161ac06a-fdce-4a22-b21c-d9a297bf4142)\"" pod="openstack/root-account-create-update-mvvwk" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.825218 4816 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 16 13:26:32 crc kubenswrapper[4816]: E0216 13:26:32.825301 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts podName:161ac06a-fdce-4a22-b21c-d9a297bf4142 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:33.325283046 +0000 UTC m=+1392.651996774 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts") pod "root-account-create-update-mvvwk" (UID: "161ac06a-fdce-4a22-b21c-d9a297bf4142") : configmap "openstack-scripts" not found Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.009699 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.012245 4816 scope.go:117] "RemoveContainer" containerID="e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8" Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.030777 4816 secret.go:188] Couldn't get secret openstack/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.030829 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data podName:911fa155-35a3-49ee-9bc0-f10a8bac544d nodeName:}" failed. No retries permitted until 2026-02-16 13:26:37.030816221 +0000 UTC m=+1396.357529949 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data") pod "nova-cell1-conductor-0" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d") : secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.060457 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.069083 4816 scope.go:117] "RemoveContainer" containerID="9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.105712 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133415 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/975a9d1d-44d0-4b11-8a41-8f237da1ad85-logs\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133474 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-combined-ca-bundle\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133497 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-public-tls-certs\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133565 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/975a9d1d-44d0-4b11-8a41-8f237da1ad85-etc-machine-id\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133582 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-public-tls-certs\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133688 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jg2zl\" (UniqueName: \"kubernetes.io/projected/975a9d1d-44d0-4b11-8a41-8f237da1ad85-kube-api-access-jg2zl\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133717 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-etc-swift\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133738 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-internal-tls-certs\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133783 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: 
\"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133804 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-internal-tls-certs\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133820 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-config-data\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133843 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-run-httpd\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133861 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbhtf\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-kube-api-access-dbhtf\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133882 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-combined-ca-bundle\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133916 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-scripts\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133944 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-log-httpd\") pod \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\" (UID: \"fd68bcb4-cb94-422a-b44a-7fd47d309f0a\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.133971 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data-custom\") pod \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\" (UID: \"975a9d1d-44d0-4b11-8a41-8f237da1ad85\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.134452 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/975a9d1d-44d0-4b11-8a41-8f237da1ad85-logs" (OuterVolumeSpecName: "logs") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.140984 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/975a9d1d-44d0-4b11-8a41-8f237da1ad85-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.141103 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.143388 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-kube-api-access-dbhtf" (OuterVolumeSpecName: "kube-api-access-dbhtf") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "kube-api-access-dbhtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.143924 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.144620 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.160816 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.161029 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-gdtxq operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-8fa6-account-create-update-nb7z5" podUID="283d5223-0722-43a2-bf1c-0dd876d0313c" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.163593 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/975a9d1d-44d0-4b11-8a41-8f237da1ad85-kube-api-access-jg2zl" (OuterVolumeSpecName: "kube-api-access-jg2zl") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "kube-api-access-jg2zl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.167993 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.168803 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-scripts" (OuterVolumeSpecName: "scripts") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.173707 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-cbf6d8974-7ddwq"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.179061 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.190432 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-cbf6d8974-7ddwq"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.191604 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.207095 4816 scope.go:117] "RemoveContainer" containerID="e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.207222 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.207876 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8\": container with ID starting with e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8 not found: ID does not exist" containerID="e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.207924 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8"} err="failed to get container status \"e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8\": rpc error: code = NotFound desc = could not find container \"e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8\": container with ID starting with e961e4fe3231caffa4076702bd021b721a6b7ca97ad0cf9066bea5856f38abc8 not found: ID does not exist" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.207944 4816 scope.go:117] "RemoveContainer" containerID="9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf" Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.208446 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf\": container with ID starting with 9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf not found: ID does not exist" containerID="9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 
13:26:33.208496 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf"} err="failed to get container status \"9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf\": rpc error: code = NotFound desc = could not find container \"9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf\": container with ID starting with 9b4d24fd6e387b49a8c0395b42c59570b5dfb78b8a04ce4c7ed8167799bf1ecf not found: ID does not exist" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.208510 4816 scope.go:117] "RemoveContainer" containerID="2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.210421 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.217644 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.223900 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.224047 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.239295 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.242181 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-public-tls-certs\") pod \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.242398 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-internal-tls-certs\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.242526 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpldd\" (UniqueName: \"kubernetes.io/projected/26c49ecf-0c54-4aa7-893f-861370b1cdbd-kube-api-access-mpldd\") pod \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.242631 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-combined-ca-bundle\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.242812 4816 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data\") pod \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.242940 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26c49ecf-0c54-4aa7-893f-861370b1cdbd-logs\") pod \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243030 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-logs\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243121 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243251 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-internal-tls-certs\") pod \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243370 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-combined-ca-bundle\") pod \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243520 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-config-data\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243614 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-scripts\") pod \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243752 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-677hs\" (UniqueName: \"kubernetes.io/projected/585ce61c-bb97-4b2c-bea8-c55d06e6db79-kube-api-access-677hs\") pod \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243848 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-httpd-run\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.243994 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-combined-ca-bundle\") pod \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244130 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98v5j\" (UniqueName: \"kubernetes.io/projected/1940a629-51c3-4dca-a26d-02080dabbd68-kube-api-access-98v5j\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244248 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-config-data\") pod \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\" (UID: \"26c49ecf-0c54-4aa7-893f-861370b1cdbd\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244361 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/585ce61c-bb97-4b2c-bea8-c55d06e6db79-logs\") pod \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244484 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-scripts\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244582 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-httpd-run\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244701 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-config-data\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244799 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-combined-ca-bundle\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.244889 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-logs\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.245018 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data-custom\") pod \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\" (UID: \"585ce61c-bb97-4b2c-bea8-c55d06e6db79\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.245176 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-public-tls-certs\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.245469 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-scripts\") pod \"1940a629-51c3-4dca-a26d-02080dabbd68\" (UID: \"1940a629-51c3-4dca-a26d-02080dabbd68\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.245567 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h55cd\" (UniqueName: \"kubernetes.io/projected/6c7838cc-1729-405a-af1e-ad0f1b9884f7-kube-api-access-h55cd\") pod \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\" (UID: \"6c7838cc-1729-405a-af1e-ad0f1b9884f7\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.246409 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.247215 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/975a9d1d-44d0-4b11-8a41-8f237da1ad85-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248009 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jg2zl\" (UniqueName: \"kubernetes.io/projected/975a9d1d-44d0-4b11-8a41-8f237da1ad85-kube-api-access-jg2zl\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248139 4816 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248268 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248349 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbhtf\" (UniqueName: \"kubernetes.io/projected/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-kube-api-access-dbhtf\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248471 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248571 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248647 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248777 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/975a9d1d-44d0-4b11-8a41-8f237da1ad85-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc 
kubenswrapper[4816]: I0216 13:26:33.247079 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.247604 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248369 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26c49ecf-0c54-4aa7-893f-861370b1cdbd-logs" (OuterVolumeSpecName: "logs") pod "26c49ecf-0c54-4aa7-893f-861370b1cdbd" (UID: "26c49ecf-0c54-4aa7-893f-861370b1cdbd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.248504 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-logs" (OuterVolumeSpecName: "logs") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.254859 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585ce61c-bb97-4b2c-bea8-c55d06e6db79-logs" (OuterVolumeSpecName: "logs") pod "585ce61c-bb97-4b2c-bea8-c55d06e6db79" (UID: "585ce61c-bb97-4b2c-bea8-c55d06e6db79"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.257428 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.258237 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-logs" (OuterVolumeSpecName: "logs") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.258557 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.261018 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-bcx8r"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.264698 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-scripts" (OuterVolumeSpecName: "scripts") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.266025 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26c49ecf-0c54-4aa7-893f-861370b1cdbd-kube-api-access-mpldd" (OuterVolumeSpecName: "kube-api-access-mpldd") pod "26c49ecf-0c54-4aa7-893f-861370b1cdbd" (UID: "26c49ecf-0c54-4aa7-893f-861370b1cdbd"). InnerVolumeSpecName "kube-api-access-mpldd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.266349 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.270942 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-c4fc-account-create-update-bcx8r"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.272815 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "585ce61c-bb97-4b2c-bea8-c55d06e6db79" (UID: "585ce61c-bb97-4b2c-bea8-c55d06e6db79"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.272826 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.272943 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-scripts" (OuterVolumeSpecName: "scripts") pod "26c49ecf-0c54-4aa7-893f-861370b1cdbd" (UID: "26c49ecf-0c54-4aa7-893f-861370b1cdbd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.272970 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/585ce61c-bb97-4b2c-bea8-c55d06e6db79-kube-api-access-677hs" (OuterVolumeSpecName: "kube-api-access-677hs") pod "585ce61c-bb97-4b2c-bea8-c55d06e6db79" (UID: "585ce61c-bb97-4b2c-bea8-c55d06e6db79"). InnerVolumeSpecName "kube-api-access-677hs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.274892 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.276204 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c7838cc-1729-405a-af1e-ad0f1b9884f7-kube-api-access-h55cd" (OuterVolumeSpecName: "kube-api-access-h55cd") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "kube-api-access-h55cd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.280706 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1940a629-51c3-4dca-a26d-02080dabbd68-kube-api-access-98v5j" (OuterVolumeSpecName: "kube-api-access-98v5j") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "kube-api-access-98v5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.281551 4816 scope.go:117] "RemoveContainer" containerID="deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.288259 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-scripts" (OuterVolumeSpecName: "scripts") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.311869 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-fc34-account-create-update-q25nr"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.323809 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-fc34-account-create-update-q25nr"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.331021 4816 scope.go:117] "RemoveContainer" containerID="2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3" Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.331527 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3\": container with ID starting with 2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3 not found: ID does not exist" containerID="2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.331590 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3"} err="failed to get container status \"2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3\": rpc error: code = NotFound desc = could not find container \"2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3\": container with ID starting with 2ffe46750672a43320da20141f744ae2d254bf8f87ca029aae1ac801905588b3 not found: ID does not exist" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.331702 4816 scope.go:117] "RemoveContainer" containerID="deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505" Feb 16 13:26:33 crc 
kubenswrapper[4816]: E0216 13:26:33.333977 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505\": container with ID starting with deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505 not found: ID does not exist" containerID="deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.334017 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505"} err="failed to get container status \"deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505\": rpc error: code = NotFound desc = could not find container \"deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505\": container with ID starting with deb91450bca78ac7b13f59e5033086e9c40db9f55eb894ac0c78070e3a921505 not found: ID does not exist" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.349612 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data-custom\") pod \"592c5e3a-06b6-4b36-8281-c5c49051c945\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.349730 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-combined-ca-bundle\") pod \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.349801 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-config-data\") pod \"27fde082-22f6-49b6-9750-796875a2fe49\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.349859 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-nova-metadata-tls-certs\") pod \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.349890 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-public-tls-certs\") pod \"27fde082-22f6-49b6-9750-796875a2fe49\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.349963 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-public-tls-certs\") pod \"592c5e3a-06b6-4b36-8281-c5c49051c945\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.350756 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-internal-tls-certs\") pod \"27fde082-22f6-49b6-9750-796875a2fe49\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " Feb 16 13:26:33 
crc kubenswrapper[4816]: I0216 13:26:33.350841 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-combined-ca-bundle\") pod \"592c5e3a-06b6-4b36-8281-c5c49051c945\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.350896 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592c5e3a-06b6-4b36-8281-c5c49051c945-logs\") pod \"592c5e3a-06b6-4b36-8281-c5c49051c945\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.350926 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-config-data\") pod \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.350985 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-config\") pod \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351016 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27fde082-22f6-49b6-9750-796875a2fe49-logs\") pod \"27fde082-22f6-49b6-9750-796875a2fe49\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351043 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-combined-ca-bundle\") pod \"27fde082-22f6-49b6-9750-796875a2fe49\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351072 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgk27\" (UniqueName: \"kubernetes.io/projected/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-kube-api-access-lgk27\") pod \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351131 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-certs\") pod \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351152 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wg5z\" (UniqueName: \"kubernetes.io/projected/592c5e3a-06b6-4b36-8281-c5c49051c945-kube-api-access-8wg5z\") pod \"592c5e3a-06b6-4b36-8281-c5c49051c945\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351186 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sbrx\" (UniqueName: \"kubernetes.io/projected/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-api-access-8sbrx\") pod \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\" (UID: 
\"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351206 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-internal-tls-certs\") pod \"592c5e3a-06b6-4b36-8281-c5c49051c945\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351235 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data\") pod \"592c5e3a-06b6-4b36-8281-c5c49051c945\" (UID: \"592c5e3a-06b6-4b36-8281-c5c49051c945\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351267 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-logs\") pod \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\" (UID: \"892fbdbb-3c72-45c9-8987-4bd9a01ddf98\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351300 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-combined-ca-bundle\") pod \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\" (UID: \"28ab0c0d-5c1e-403d-a3d9-234a5c723884\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351453 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp5v7\" (UniqueName: \"kubernetes.io/projected/27fde082-22f6-49b6-9750-796875a2fe49-kube-api-access-dp5v7\") pod \"27fde082-22f6-49b6-9750-796875a2fe49\" (UID: \"27fde082-22f6-49b6-9750-796875a2fe49\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.351619 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27fde082-22f6-49b6-9750-796875a2fe49-logs" (OuterVolumeSpecName: "logs") pod "27fde082-22f6-49b6-9750-796875a2fe49" (UID: "27fde082-22f6-49b6-9750-796875a2fe49"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352202 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352219 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-677hs\" (UniqueName: \"kubernetes.io/projected/585ce61c-bb97-4b2c-bea8-c55d06e6db79-kube-api-access-677hs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352230 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352240 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98v5j\" (UniqueName: \"kubernetes.io/projected/1940a629-51c3-4dca-a26d-02080dabbd68-kube-api-access-98v5j\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352250 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrcqk\" (UniqueName: \"kubernetes.io/projected/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-kube-api-access-jrcqk\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352259 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352268 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27fde082-22f6-49b6-9750-796875a2fe49-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352276 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/585ce61c-bb97-4b2c-bea8-c55d06e6db79-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352285 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352293 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352302 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c7838cc-1729-405a-af1e-ad0f1b9884f7-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352311 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352325 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352334 4816 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-h55cd\" (UniqueName: \"kubernetes.io/projected/6c7838cc-1729-405a-af1e-ad0f1b9884f7-kube-api-access-h55cd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352711 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbe806c-23ac-4f2f-87e1-be1ec2189c87-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352734 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352745 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpldd\" (UniqueName: \"kubernetes.io/projected/26c49ecf-0c54-4aa7-893f-861370b1cdbd-kube-api-access-mpldd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352754 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26c49ecf-0c54-4aa7-893f-861370b1cdbd-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352763 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1940a629-51c3-4dca-a26d-02080dabbd68-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352777 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.356212 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e623-account-create-update-9hfsw"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.352617 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-logs" (OuterVolumeSpecName: "logs") pod "892fbdbb-3c72-45c9-8987-4bd9a01ddf98" (UID: "892fbdbb-3c72-45c9-8987-4bd9a01ddf98"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.355363 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/592c5e3a-06b6-4b36-8281-c5c49051c945-logs" (OuterVolumeSpecName: "logs") pod "592c5e3a-06b6-4b36-8281-c5c49051c945" (UID: "592c5e3a-06b6-4b36-8281-c5c49051c945"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.357600 4816 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.359020 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts podName:161ac06a-fdce-4a22-b21c-d9a297bf4142 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:34.358986294 +0000 UTC m=+1393.685700022 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts") pod "root-account-create-update-mvvwk" (UID: "161ac06a-fdce-4a22-b21c-d9a297bf4142") : configmap "openstack-scripts" not found Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.369513 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.386308 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e623-account-create-update-9hfsw"] Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.386840 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-api-access-8sbrx" (OuterVolumeSpecName: "kube-api-access-8sbrx") pod "28ab0c0d-5c1e-403d-a3d9-234a5c723884" (UID: "28ab0c0d-5c1e-403d-a3d9-234a5c723884"). InnerVolumeSpecName "kube-api-access-8sbrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.387713 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27fde082-22f6-49b6-9750-796875a2fe49-kube-api-access-dp5v7" (OuterVolumeSpecName: "kube-api-access-dp5v7") pod "27fde082-22f6-49b6-9750-796875a2fe49" (UID: "27fde082-22f6-49b6-9750-796875a2fe49"). InnerVolumeSpecName "kube-api-access-dp5v7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.412000 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/592c5e3a-06b6-4b36-8281-c5c49051c945-kube-api-access-8wg5z" (OuterVolumeSpecName: "kube-api-access-8wg5z") pod "592c5e3a-06b6-4b36-8281-c5c49051c945" (UID: "592c5e3a-06b6-4b36-8281-c5c49051c945"). InnerVolumeSpecName "kube-api-access-8wg5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.412339 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "592c5e3a-06b6-4b36-8281-c5c49051c945" (UID: "592c5e3a-06b6-4b36-8281-c5c49051c945"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.430061 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07be83ef-77e3-4327-bcbd-ed16e9a92e40" path="/var/lib/kubelet/pods/07be83ef-77e3-4327-bcbd-ed16e9a92e40/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.430512 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2576d56f-8296-4693-911b-20b6814cd8aa" path="/var/lib/kubelet/pods/2576d56f-8296-4693-911b-20b6814cd8aa/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.431276 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cdb34b1-893f-4701-89b2-195db5c6c03b" path="/var/lib/kubelet/pods/8cdb34b1-893f-4701-89b2-195db5c6c03b/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.432022 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39" path="/var/lib/kubelet/pods/c95c4bfe-79a7-4485-ad1b-f76a3bcc6e39/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.439175 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca667270-ef70-4131-a442-3600cdb034c9" path="/var/lib/kubelet/pods/ca667270-ef70-4131-a442-3600cdb034c9/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.439858 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d60f290e-fa31-45c1-a6ec-857a7ac94394" path="/var/lib/kubelet/pods/d60f290e-fa31-45c1-a6ec-857a7ac94394/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.440284 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddbe806c-23ac-4f2f-87e1-be1ec2189c87" path="/var/lib/kubelet/pods/ddbe806c-23ac-4f2f-87e1-be1ec2189c87/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.449596 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f206b784-e69e-420e-a975-95d7e72f7a30" path="/var/lib/kubelet/pods/f206b784-e69e-420e-a975-95d7e72f7a30/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.450378 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff0d5c9b-ff09-43bf-977f-e69533c63966" path="/var/lib/kubelet/pods/ff0d5c9b-ff09-43bf-977f-e69533c63966/volumes" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.454694 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs9bj\" (UniqueName: \"kubernetes.io/projected/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kube-api-access-rs9bj\") pod \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.455002 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-config-data\") pod \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.455270 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-combined-ca-bundle\") pod \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.455497 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-memcached-tls-certs\") pod \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.455719 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kolla-config\") pod \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\" (UID: \"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb\") " Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.456244 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdtxq\" (UniqueName: \"kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.456569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts\") pod \"keystone-8fa6-account-create-update-nb7z5\" (UID: \"283d5223-0722-43a2-bf1c-0dd876d0313c\") " pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.457049 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592c5e3a-06b6-4b36-8281-c5c49051c945-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.457205 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wg5z\" (UniqueName: \"kubernetes.io/projected/592c5e3a-06b6-4b36-8281-c5c49051c945-kube-api-access-8wg5z\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.457306 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sbrx\" (UniqueName: \"kubernetes.io/projected/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-api-access-8sbrx\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.457466 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-logs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.457584 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp5v7\" (UniqueName: \"kubernetes.io/projected/27fde082-22f6-49b6-9750-796875a2fe49-kube-api-access-dp5v7\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.457731 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.459221 4816 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.459297 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts podName:283d5223-0722-43a2-bf1c-0dd876d0313c nodeName:}" failed. 
No retries permitted until 2026-02-16 13:26:35.459273153 +0000 UTC m=+1394.785986871 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts") pod "keystone-8fa6-account-create-update-nb7z5" (UID: "283d5223-0722-43a2-bf1c-0dd876d0313c") : configmap "openstack-scripts" not found Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.459812 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-config-data" (OuterVolumeSpecName: "config-data") pod "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" (UID: "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.461566 4816 projected.go:194] Error preparing data for projected volume kube-api-access-gdtxq for pod openstack/keystone-8fa6-account-create-update-nb7z5: failed to fetch token: serviceaccounts "galera-openstack" not found Feb 16 13:26:33 crc kubenswrapper[4816]: E0216 13:26:33.461838 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq podName:283d5223-0722-43a2-bf1c-0dd876d0313c nodeName:}" failed. No retries permitted until 2026-02-16 13:26:35.461800312 +0000 UTC m=+1394.788514220 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-gdtxq" (UniqueName: "kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq") pod "keystone-8fa6-account-create-update-nb7z5" (UID: "283d5223-0722-43a2-bf1c-0dd876d0313c") : failed to fetch token: serviceaccounts "galera-openstack" not found Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.449713 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-kube-api-access-lgk27" (OuterVolumeSpecName: "kube-api-access-lgk27") pod "892fbdbb-3c72-45c9-8987-4bd9a01ddf98" (UID: "892fbdbb-3c72-45c9-8987-4bd9a01ddf98"). InnerVolumeSpecName "kube-api-access-lgk27". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.468919 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" (UID: "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.475389 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.488528 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kube-api-access-rs9bj" (OuterVolumeSpecName: "kube-api-access-rs9bj") pod "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" (UID: "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb"). 
InnerVolumeSpecName "kube-api-access-rs9bj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.587601 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs9bj\" (UniqueName: \"kubernetes.io/projected/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kube-api-access-rs9bj\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.587862 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.587874 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.587884 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgk27\" (UniqueName: \"kubernetes.io/projected/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-kube-api-access-lgk27\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.587892 4816 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.633477 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data" (OuterVolumeSpecName: "config-data") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.705488 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.784927 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-config-data" (OuterVolumeSpecName: "config-data") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.809616 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.811259 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.811282 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.827600 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28ab0c0d-5c1e-403d-a3d9-234a5c723884" (UID: "28ab0c0d-5c1e-403d-a3d9-234a5c723884"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.829813 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.841161 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.874881 4816 generic.go:334] "Generic (PLEG): container finished" podID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" containerID="d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6" exitCode=0 Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.875036 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.884049 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.893196 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "892fbdbb-3c72-45c9-8987-4bd9a01ddf98" (UID: "892fbdbb-3c72-45c9-8987-4bd9a01ddf98"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.902015 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.902826 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5ffd8b88f4-cqjcr" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.902878 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.902826 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-757c679767-6dfp5" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.903074 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.903138 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-89f5bdcc-rdr9p" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.903242 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.903420 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.904903 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b75bc5544-lb94h" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.914325 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.914354 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.914365 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.921696 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.942711 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" (UID: "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.944172 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.980481 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "585ce61c-bb97-4b2c-bea8-c55d06e6db79" (UID: "585ce61c-bb97-4b2c-bea8-c55d06e6db79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.994889 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "892fbdbb-3c72-45c9-8987-4bd9a01ddf98" (UID: "892fbdbb-3c72-45c9-8987-4bd9a01ddf98"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:33 crc kubenswrapper[4816]: I0216 13:26:33.996472 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27fde082-22f6-49b6-9750-796875a2fe49" (UID: "27fde082-22f6-49b6-9750-796875a2fe49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.003440 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "592c5e3a-06b6-4b36-8281-c5c49051c945" (UID: "592c5e3a-06b6-4b36-8281-c5c49051c945"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.012866 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-config-data" (OuterVolumeSpecName: "config-data") pod "27fde082-22f6-49b6-9750-796875a2fe49" (UID: "27fde082-22f6-49b6-9750-796875a2fe49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: E0216 13:26:34.016891 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.016938 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: E0216 13:26:34.016968 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data podName:9eb39773-46a3-4f31-a95a-64a183dbe417 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:42.016947484 +0000 UTC m=+1401.343661212 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data") pod "rabbitmq-server-0" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417") : configmap "rabbitmq-config-data" not found Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.017000 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.017016 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.017029 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.017042 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.017056 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.017071 4816 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.017086 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.019689 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.055863 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-config-data" (OuterVolumeSpecName: "config-data") pod "26c49ecf-0c54-4aa7-893f-861370b1cdbd" (UID: "26c49ecf-0c54-4aa7-893f-861370b1cdbd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.057167 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.082970 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data" (OuterVolumeSpecName: "config-data") pod "585ce61c-bb97-4b2c-bea8-c55d06e6db79" (UID: "585ce61c-bb97-4b2c-bea8-c55d06e6db79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.085790 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.087350 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "592c5e3a-06b6-4b36-8281-c5c49051c945" (UID: "592c5e3a-06b6-4b36-8281-c5c49051c945"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.095069 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "28ab0c0d-5c1e-403d-a3d9-234a5c723884" (UID: "28ab0c0d-5c1e-403d-a3d9-234a5c723884"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103523 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data" (OuterVolumeSpecName: "config-data") pod "592c5e3a-06b6-4b36-8281-c5c49051c945" (UID: "592c5e3a-06b6-4b36-8281-c5c49051c945"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103815 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"892fbdbb-3c72-45c9-8987-4bd9a01ddf98","Type":"ContainerDied","Data":"11a9ead41658435a1c1a8e03bef21a11d70411f97d3b7ebe337851cbdf647e5b"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103854 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1245-account-create-update-k2kgr"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103875 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1245-account-create-update-k2kgr"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103898 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-jsr6m"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103912 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d27b-account-create-update-jsr6m"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103931 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-1657-account-create-update-jfrmp"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103945 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-1657-account-create-update-jfrmp"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103967 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-7489-account-create-update-rr6kg"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103981 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-7489-account-create-update-rr6kg"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.103996 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb","Type":"ContainerDied","Data":"d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.104013 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"35fffc20-e4dc-43ad-8a7f-64da2e1ceebb","Type":"ContainerDied","Data":"c0b62e380f7cfbbdde6b3f9d9d08896bafb0952a9e6287bad8e11a6560834734"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.104025 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27fde082-22f6-49b6-9750-796875a2fe49","Type":"ContainerDied","Data":"8554fb823081f64a56e1bbc1f9c4c51204feaa39a0fe5f9921b3e6681366287e"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.104041 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-757c679767-6dfp5" event={"ID":"585ce61c-bb97-4b2c-bea8-c55d06e6db79","Type":"ContainerDied","Data":"9cbb697e7ec76e01a0ae9873825fc00b7de069659e0b00f0c643a89e785386bf"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.104101 4816 scope.go:117] "RemoveContainer" containerID="bdb9b742ad3735c49ae99cebe69749569e35206e6f4aa144a488ecb8a622f11d" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.106962 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "27fde082-22f6-49b6-9750-796875a2fe49" (UID: "27fde082-22f6-49b6-9750-796875a2fe49"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118496 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/585ce61c-bb97-4b2c-bea8-c55d06e6db79-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118527 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118539 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118549 4816 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118558 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118568 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118577 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118586 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.118834 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.133877 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-config-data" (OuterVolumeSpecName: "config-data") pod "1940a629-51c3-4dca-a26d-02080dabbd68" (UID: "1940a629-51c3-4dca-a26d-02080dabbd68"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.134183 4816 scope.go:117] "RemoveContainer" containerID="4960b5e9a538e65382b2f5a58519a9af1c316bd816511a7121ed68a138e98322" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.137539 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "26c49ecf-0c54-4aa7-893f-861370b1cdbd" (UID: "26c49ecf-0c54-4aa7-893f-861370b1cdbd"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.163042 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "27fde082-22f6-49b6-9750-796875a2fe49" (UID: "27fde082-22f6-49b6-9750-796875a2fe49"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.187788 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-config-data" (OuterVolumeSpecName: "config-data") pod "6c7838cc-1729-405a-af1e-ad0f1b9884f7" (UID: "6c7838cc-1729-405a-af1e-ad0f1b9884f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.192521 4816 scope.go:117] "RemoveContainer" containerID="d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.207845 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" (UID: "35fffc20-e4dc-43ad-8a7f-64da2e1ceebb"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.221646 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27fde082-22f6-49b6-9750-796875a2fe49-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.221694 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.221705 4816 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.221715 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1940a629-51c3-4dca-a26d-02080dabbd68-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.221723 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.221732 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c7838cc-1729-405a-af1e-ad0f1b9884f7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.268493 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26c49ecf-0c54-4aa7-893f-861370b1cdbd" (UID: "26c49ecf-0c54-4aa7-893f-861370b1cdbd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.274204 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fd68bcb4-cb94-422a-b44a-7fd47d309f0a" (UID: "fd68bcb4-cb94-422a-b44a-7fd47d309f0a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.299761 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.306079 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "975a9d1d-44d0-4b11-8a41-8f237da1ad85" (UID: "975a9d1d-44d0-4b11-8a41-8f237da1ad85"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.309728 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.314349 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.315195 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-config-data" (OuterVolumeSpecName: "config-data") pod "892fbdbb-3c72-45c9-8987-4bd9a01ddf98" (UID: "892fbdbb-3c72-45c9-8987-4bd9a01ddf98"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.323280 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892fbdbb-3c72-45c9-8987-4bd9a01ddf98-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.323313 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/975a9d1d-44d0-4b11-8a41-8f237da1ad85-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.323321 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd68bcb4-cb94-422a-b44a-7fd47d309f0a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.323330 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.326857 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.333018 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.334770 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "28ab0c0d-5c1e-403d-a3d9-234a5c723884" (UID: "28ab0c0d-5c1e-403d-a3d9-234a5c723884"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.335158 4816 scope.go:117] "RemoveContainer" containerID="d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.340635 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: E0216 13:26:34.341267 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6\": container with ID starting with d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6 not found: ID does not exist" containerID="d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.341291 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6"} err="failed to get container status \"d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6\": rpc error: code = NotFound desc = could not find container \"d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6\": container with ID starting with d21b8e0bc5101e150f0d0291e8f1e98f0513c8fb870dbf4c65d7d0fdd5718bc6 not found: ID does not exist" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.341330 4816 scope.go:117] "RemoveContainer" containerID="0835c326e6522f8ce700ad606e2e71a3e72b02b2702bf969281105422fd2bf4b" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.344396 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "26c49ecf-0c54-4aa7-893f-861370b1cdbd" (UID: "26c49ecf-0c54-4aa7-893f-861370b1cdbd"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.347455 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "592c5e3a-06b6-4b36-8281-c5c49051c945" (UID: "592c5e3a-06b6-4b36-8281-c5c49051c945"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.351366 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-757c679767-6dfp5"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.356489 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-757c679767-6dfp5"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.360564 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.392950 4816 scope.go:117] "RemoveContainer" containerID="bc1beb983a2e186cb5db4c2a6fcde47de90c5bb66dbd822cf870630ece875a2b" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.424991 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26c49ecf-0c54-4aa7-893f-861370b1cdbd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.425031 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592c5e3a-06b6-4b36-8281-c5c49051c945-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.425095 4816 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/28ab0c0d-5c1e-403d-a3d9-234a5c723884-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: E0216 13:26:34.425209 4816 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Feb 16 13:26:34 crc kubenswrapper[4816]: E0216 13:26:34.425283 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts podName:161ac06a-fdce-4a22-b21c-d9a297bf4142 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:36.425264456 +0000 UTC m=+1395.751978184 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts") pod "root-account-create-update-mvvwk" (UID: "161ac06a-fdce-4a22-b21c-d9a297bf4142") : configmap "openstack-scripts" not found Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.526533 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts\") pod \"161ac06a-fdce-4a22-b21c-d9a297bf4142\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.526594 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mllmd\" (UniqueName: \"kubernetes.io/projected/161ac06a-fdce-4a22-b21c-d9a297bf4142-kube-api-access-mllmd\") pod \"161ac06a-fdce-4a22-b21c-d9a297bf4142\" (UID: \"161ac06a-fdce-4a22-b21c-d9a297bf4142\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.530808 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "161ac06a-fdce-4a22-b21c-d9a297bf4142" (UID: "161ac06a-fdce-4a22-b21c-d9a297bf4142"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.531091 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/161ac06a-fdce-4a22-b21c-d9a297bf4142-kube-api-access-mllmd" (OuterVolumeSpecName: "kube-api-access-mllmd") pod "161ac06a-fdce-4a22-b21c-d9a297bf4142" (UID: "161ac06a-fdce-4a22-b21c-d9a297bf4142"). InnerVolumeSpecName "kube-api-access-mllmd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.585539 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.592258 4816 scope.go:117] "RemoveContainer" containerID="83a666709d0398bcf18db5bff64d1c6fa8da80e779c24200130a17a483a8ae2b" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.631047 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/161ac06a-fdce-4a22-b21c-d9a297bf4142-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.631089 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mllmd\" (UniqueName: \"kubernetes.io/projected/161ac06a-fdce-4a22-b21c-d9a297bf4142-kube-api-access-mllmd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.647832 4816 scope.go:117] "RemoveContainer" containerID="b0b6d2f42d6bfd99410fd4fbcca38774a4fbd1a10e9d1373d8a5aa64dbbd9e0a" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.680181 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5b75bc5544-lb94h"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.763722 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-combined-ca-bundle\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.763820 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-galera-tls-certs\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.763915 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.764013 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-default\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.764040 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-kolla-config\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.764073 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c7fl\" (UniqueName: \"kubernetes.io/projected/4a9428a1-a54a-4e85-b898-1eac97438ba8-kube-api-access-9c7fl\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.764102 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-generated\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.764121 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-operator-scripts\") pod \"4a9428a1-a54a-4e85-b898-1eac97438ba8\" (UID: \"4a9428a1-a54a-4e85-b898-1eac97438ba8\") " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.765339 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.765381 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.766929 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.767302 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.776984 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a9428a1-a54a-4e85-b898-1eac97438ba8-kube-api-access-9c7fl" (OuterVolumeSpecName: "kube-api-access-9c7fl") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "kube-api-access-9c7fl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.777523 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5b75bc5544-lb94h"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.787763 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-89f5bdcc-rdr9p"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.820752 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-89f5bdcc-rdr9p"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.821254 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "mysql-db") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.837933 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5ffd8b88f4-cqjcr"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.840805 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.847190 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5ffd8b88f4-cqjcr"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.857466 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.866613 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.866642 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-default\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.866673 4816 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.866685 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c7fl\" (UniqueName: \"kubernetes.io/projected/4a9428a1-a54a-4e85-b898-1eac97438ba8-kube-api-access-9c7fl\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.866694 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a9428a1-a54a-4e85-b898-1eac97438ba8-config-data-generated\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.866703 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a9428a1-a54a-4e85-b898-1eac97438ba8-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: 
I0216 13:26:34.866711 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.868904 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.876923 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.885898 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.887799 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "4a9428a1-a54a-4e85-b898-1eac97438ba8" (UID: "4a9428a1-a54a-4e85-b898-1eac97438ba8"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.888290 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.893842 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.903648 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.934504 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.942149 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mvvwk" event={"ID":"161ac06a-fdce-4a22-b21c-d9a297bf4142","Type":"ContainerDied","Data":"2b8aa14c2d3209be6ee82c27deb5661aada48946547867f21781fad6b358a3eb"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.942214 4816 scope.go:117] "RemoveContainer" containerID="b3abd28222842a555834426e0436b1d3f06efdd34b364c03292e465a51f57ba1" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.942368 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-mvvwk" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.970580 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8b7ff418-3104-4d5e-880c-bc9de7258943/ovn-northd/0.log" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.970632 4816 generic.go:334] "Generic (PLEG): container finished" podID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerID="3ab81f6efce6fb86362c42eda7876dd469e8113fb561b222ea56c2868f292aeb" exitCode=139 Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.971174 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.971232 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b7ff418-3104-4d5e-880c-bc9de7258943","Type":"ContainerDied","Data":"3ab81f6efce6fb86362c42eda7876dd469e8113fb561b222ea56c2868f292aeb"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.974888 4816 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a9428a1-a54a-4e85-b898-1eac97438ba8-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.974915 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.991014 4816 generic.go:334] "Generic (PLEG): container finished" podID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerID="9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29" exitCode=0 Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.991183 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a9428a1-a54a-4e85-b898-1eac97438ba8","Type":"ContainerDied","Data":"9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.991226 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a9428a1-a54a-4e85-b898-1eac97438ba8","Type":"ContainerDied","Data":"d243137c22cc21b5b80520bb6757b21be89545b973f84d2df17a484e06e87be6"} Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.991372 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 16 13:26:34 crc kubenswrapper[4816]: I0216 13:26:34.999501 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-mvvwk"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.000185 4816 scope.go:117] "RemoveContainer" containerID="9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.007192 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-mvvwk"] Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.048390 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.065518 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8fa6-account-create-update-nb7z5" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.073839 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.073988 4816 scope.go:117] "RemoveContainer" containerID="63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.090772 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.090876 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="b266490b-4d0a-4463-8818-2bcdc39cdf88" containerName="nova-cell0-conductor-conductor" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.091086 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.105034 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.113830 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.202:3000/\": dial tcp 10.217.0.202:3000: connect: connection refused" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.120858 4816 scope.go:117] "RemoveContainer" containerID="9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.133982 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29\": container with ID starting with 9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29 not found: ID does not exist" containerID="9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.134031 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29"} err="failed to get container status \"9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29\": rpc error: code = NotFound desc = could not find container \"9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29\": container with ID starting with 9b4c19947c05f05b5ed60aea9633c583d8b31ec1afe0f33a32b8dd828ab36d29 not found: ID does not exist" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.134059 4816 scope.go:117] "RemoveContainer" containerID="63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.134518 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1\": container with ID starting with 63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1 not found: ID does not exist" containerID="63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.134567 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1"} err="failed to get container status \"63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1\": rpc error: code = NotFound desc = could not find container \"63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1\": container with ID starting with 63d3b79d4d6060a4aca896db2a64b0bbc71c1de9b44404fc331b83c96c111ba1 not found: ID does not exist" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.150951 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.152297 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.153764 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.153798 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/nova-scheduler-0" podUID="7554f0b7-4174-4950-ab00-aa21ecf64b56" containerName="nova-scheduler-scheduler" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.164159 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8fa6-account-create-update-nb7z5"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.168249 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-8fa6-account-create-update-nb7z5"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.206058 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8b7ff418-3104-4d5e-880c-bc9de7258943/ovn-northd/0.log" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.206139 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339055 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-metrics-certs-tls-certs\") pod \"8b7ff418-3104-4d5e-880c-bc9de7258943\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339138 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-config\") pod \"8b7ff418-3104-4d5e-880c-bc9de7258943\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339175 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-combined-ca-bundle\") pod \"8b7ff418-3104-4d5e-880c-bc9de7258943\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339258 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59k8q\" (UniqueName: \"kubernetes.io/projected/8b7ff418-3104-4d5e-880c-bc9de7258943-kube-api-access-59k8q\") pod \"8b7ff418-3104-4d5e-880c-bc9de7258943\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339303 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-rundir\") pod \"8b7ff418-3104-4d5e-880c-bc9de7258943\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339323 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-scripts\") pod \"8b7ff418-3104-4d5e-880c-bc9de7258943\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339405 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-northd-tls-certs\") pod \"8b7ff418-3104-4d5e-880c-bc9de7258943\" (UID: \"8b7ff418-3104-4d5e-880c-bc9de7258943\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339771 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdtxq\" (UniqueName: 
\"kubernetes.io/projected/283d5223-0722-43a2-bf1c-0dd876d0313c-kube-api-access-gdtxq\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.339784 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/283d5223-0722-43a2-bf1c-0dd876d0313c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.339871 4816 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.339915 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:43.339901827 +0000 UTC m=+1402.666615555 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scripts" not found Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.340735 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "8b7ff418-3104-4d5e-880c-bc9de7258943" (UID: "8b7ff418-3104-4d5e-880c-bc9de7258943"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.340791 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-scripts" (OuterVolumeSpecName: "scripts") pod "8b7ff418-3104-4d5e-880c-bc9de7258943" (UID: "8b7ff418-3104-4d5e-880c-bc9de7258943"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.341415 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-config" (OuterVolumeSpecName: "config") pod "8b7ff418-3104-4d5e-880c-bc9de7258943" (UID: "8b7ff418-3104-4d5e-880c-bc9de7258943"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.341539 4816 secret.go:188] Couldn't get secret openstack/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.341612 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:43.341578453 +0000 UTC m=+1402.668292371 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scheduler-config-data" not found Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.341671 4816 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.341694 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:43.341687006 +0000 UTC m=+1402.668400734 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-config-data" not found Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.343257 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod283d5223_0722_43a2_bf1c_0dd876d0313c.slice\": RecentStats: unable to find data in memory cache]" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.359460 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b7ff418-3104-4d5e-880c-bc9de7258943-kube-api-access-59k8q" (OuterVolumeSpecName: "kube-api-access-59k8q") pod "8b7ff418-3104-4d5e-880c-bc9de7258943" (UID: "8b7ff418-3104-4d5e-880c-bc9de7258943"). InnerVolumeSpecName "kube-api-access-59k8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.376973 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.385409 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.387410 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b7ff418-3104-4d5e-880c-bc9de7258943" (UID: "8b7ff418-3104-4d5e-880c-bc9de7258943"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.387479 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.387600 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="911fa155-35a3-49ee-9bc0-f10a8bac544d" containerName="nova-cell1-conductor-conductor" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.412812 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04914705-9b32-4bae-a7d7-e5bcc15337ac" path="/var/lib/kubelet/pods/04914705-9b32-4bae-a7d7-e5bcc15337ac/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.413215 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" path="/var/lib/kubelet/pods/161ac06a-fdce-4a22-b21c-d9a297bf4142/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.413886 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" path="/var/lib/kubelet/pods/1940a629-51c3-4dca-a26d-02080dabbd68/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.416744 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" path="/var/lib/kubelet/pods/26c49ecf-0c54-4aa7-893f-861370b1cdbd/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.417484 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27fde082-22f6-49b6-9750-796875a2fe49" path="/var/lib/kubelet/pods/27fde082-22f6-49b6-9750-796875a2fe49/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.418136 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="283d5223-0722-43a2-bf1c-0dd876d0313c" path="/var/lib/kubelet/pods/283d5223-0722-43a2-bf1c-0dd876d0313c/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.418582 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28ab0c0d-5c1e-403d-a3d9-234a5c723884" path="/var/lib/kubelet/pods/28ab0c0d-5c1e-403d-a3d9-234a5c723884/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.424172 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" path="/var/lib/kubelet/pods/35fffc20-e4dc-43ad-8a7f-64da2e1ceebb/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.425596 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a9428a1-a54a-4e85-b898-1eac97438ba8" path="/var/lib/kubelet/pods/4a9428a1-a54a-4e85-b898-1eac97438ba8/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.427641 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="574f06c2-d10b-4b4c-b047-16f0bec03b42" path="/var/lib/kubelet/pods/574f06c2-d10b-4b4c-b047-16f0bec03b42/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.428150 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" 
path="/var/lib/kubelet/pods/585ce61c-bb97-4b2c-bea8-c55d06e6db79/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.436848 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" path="/var/lib/kubelet/pods/592c5e3a-06b6-4b36-8281-c5c49051c945/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.440672 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" path="/var/lib/kubelet/pods/6c7838cc-1729-405a-af1e-ad0f1b9884f7/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.441626 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-rundir\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.441641 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.441649 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7ff418-3104-4d5e-880c-bc9de7258943-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.441660 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.441729 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59k8q\" (UniqueName: \"kubernetes.io/projected/8b7ff418-3104-4d5e-880c-bc9de7258943-kube-api-access-59k8q\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.443716 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" path="/var/lib/kubelet/pods/892fbdbb-3c72-45c9-8987-4bd9a01ddf98/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.446392 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" path="/var/lib/kubelet/pods/975a9d1d-44d0-4b11-8a41-8f237da1ad85/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.448288 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfa5bec4-12b0-4788-a9d2-4dc39afd56e3" path="/var/lib/kubelet/pods/bfa5bec4-12b0-4788-a9d2-4dc39afd56e3/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.453502 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3af0ec9-22bb-4119-b349-bef284903316" path="/var/lib/kubelet/pods/e3af0ec9-22bb-4119-b349-bef284903316/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.455225 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" path="/var/lib/kubelet/pods/fd68bcb4-cb94-422a-b44a-7fd47d309f0a/volumes" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.468503 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "8b7ff418-3104-4d5e-880c-bc9de7258943" (UID: "8b7ff418-3104-4d5e-880c-bc9de7258943"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.476807 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "8b7ff418-3104-4d5e-880c-bc9de7258943" (UID: "8b7ff418-3104-4d5e-880c-bc9de7258943"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.548331 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.548540 4816 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b7ff418-3104-4d5e-880c-bc9de7258943-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.696506 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.760050 4816 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:35 crc kubenswrapper[4816]: E0216 13:26:35.760132 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data podName:ecfcee51-c740-477a-87d9-558fffc58686 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:43.760113384 +0000 UTC m=+1403.086827112 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data") pod "rabbitmq-cell1-server-0" (UID: "ecfcee51-c740-477a-87d9-558fffc58686") : configmap "rabbitmq-cell1-config-data" not found Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927343 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-combined-ca-bundle\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927428 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-config-data\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927496 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqmjw\" (UniqueName: \"kubernetes.io/projected/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-kube-api-access-hqmjw\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927532 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-credential-keys\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927605 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-internal-tls-certs\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927719 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-fernet-keys\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927759 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-public-tls-certs\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.927803 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-scripts\") pod \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\" (UID: \"b7e01f36-29ae-4e7d-9dfb-c91c3f860060\") " Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.934932 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.935350 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-kube-api-access-hqmjw" (OuterVolumeSpecName: "kube-api-access-hqmjw") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "kube-api-access-hqmjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:35 crc kubenswrapper[4816]: I0216 13:26:35.936074 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-scripts" (OuterVolumeSpecName: "scripts") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.010924 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-config-data" (OuterVolumeSpecName: "config-data") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.020010 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.037811 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqmjw\" (UniqueName: \"kubernetes.io/projected/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-kube-api-access-hqmjw\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.037852 4816 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.037864 4816 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.037876 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.037888 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.066867 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: E0216 13:26:36.099149 4816 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Feb 16 13:26:36 crc kubenswrapper[4816]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-02-16T13:26:28Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Feb 16 13:26:36 crc kubenswrapper[4816]: /etc/init.d/functions: line 589: 407 Alarm clock "$@" Feb 16 13:26:36 crc kubenswrapper[4816]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-v9w6q" message=< Feb 16 13:26:36 crc kubenswrapper[4816]: Exiting ovn-controller (1) [FAILED] Feb 16 13:26:36 crc kubenswrapper[4816]: Killing ovn-controller (1) [ OK ] Feb 16 13:26:36 crc kubenswrapper[4816]: Killing ovn-controller (1) with SIGKILL [ OK ] Feb 16 13:26:36 crc kubenswrapper[4816]: 2026-02-16T13:26:28Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Feb 16 13:26:36 crc kubenswrapper[4816]: /etc/init.d/functions: line 589: 407 Alarm clock "$@" Feb 16 13:26:36 crc kubenswrapper[4816]: > Feb 16 13:26:36 crc kubenswrapper[4816]: E0216 13:26:36.099207 4816 kuberuntime_container.go:691] "PreStop hook failed" err=< Feb 16 13:26:36 crc kubenswrapper[4816]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-02-16T13:26:28Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Feb 16 13:26:36 crc kubenswrapper[4816]: /etc/init.d/functions: line 589: 407 Alarm clock "$@" Feb 16 13:26:36 crc kubenswrapper[4816]: > pod="openstack/ovn-controller-v9w6q" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" containerID="cri-o://ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.099241 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-v9w6q" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" containerID="cri-o://ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18" gracePeriod=22 Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.122396 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.157386 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.158405 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.181518 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8b7ff418-3104-4d5e-880c-bc9de7258943/ovn-northd/0.log" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.181594 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8b7ff418-3104-4d5e-880c-bc9de7258943","Type":"ContainerDied","Data":"a3ddce87225cdbd688fbac4ea5c16e69acab2028e37dc77d12a23977d0c656cb"} Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.181630 4816 scope.go:117] "RemoveContainer" containerID="69ee0cac9e4f93da6f2382337f0f124d262804d3845fd42fa8d7a742bee8220b" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.181765 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.193790 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b7e01f36-29ae-4e7d-9dfb-c91c3f860060" (UID: "b7e01f36-29ae-4e7d-9dfb-c91c3f860060"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.220992 4816 generic.go:334] "Generic (PLEG): container finished" podID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerID="663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb" exitCode=0 Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.221317 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9eb39773-46a3-4f31-a95a-64a183dbe417","Type":"ContainerDied","Data":"663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb"} Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.221417 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9eb39773-46a3-4f31-a95a-64a183dbe417","Type":"ContainerDied","Data":"5e52400a84762e69c935f28ca297da68c40d2dc0bddca8c2b4ddac7267c9dd35"} Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.221596 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.233633 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.237276 4816 generic.go:334] "Generic (PLEG): container finished" podID="ecfcee51-c740-477a-87d9-558fffc58686" containerID="2fdeaad8597fbc86132995a2af976c1b8f4746137b8a80f5965aad1ee988bc45" exitCode=0 Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.237363 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfcee51-c740-477a-87d9-558fffc58686","Type":"ContainerDied","Data":"2fdeaad8597fbc86132995a2af976c1b8f4746137b8a80f5965aad1ee988bc45"} Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.242809 4816 generic.go:334] "Generic (PLEG): container finished" podID="b7e01f36-29ae-4e7d-9dfb-c91c3f860060" containerID="98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb" exitCode=0 Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.242838 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5b47f74bcc-6q6s6" event={"ID":"b7e01f36-29ae-4e7d-9dfb-c91c3f860060","Type":"ContainerDied","Data":"98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb"} Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.242856 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5b47f74bcc-6q6s6" event={"ID":"b7e01f36-29ae-4e7d-9dfb-c91c3f860060","Type":"ContainerDied","Data":"fd798b75e708a13c82512a3ccace98fe96fd081fb5dffe8646c94bb8ebeb6d14"} Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.242903 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5b47f74bcc-6q6s6" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.245501 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.258473 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-erlang-cookie\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259158 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9eb39773-46a3-4f31-a95a-64a183dbe417-pod-info\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259298 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259368 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72r48\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-kube-api-access-72r48\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259451 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259471 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-server-conf\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259577 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-plugins-conf\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259613 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259645 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-plugins\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259684 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-tls\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259700 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-confd\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259721 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9eb39773-46a3-4f31-a95a-64a183dbe417-erlang-cookie-secret\") pod \"9eb39773-46a3-4f31-a95a-64a183dbe417\" (UID: \"9eb39773-46a3-4f31-a95a-64a183dbe417\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259958 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259976 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7e01f36-29ae-4e7d-9dfb-c91c3f860060-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.259985 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.263036 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eb39773-46a3-4f31-a95a-64a183dbe417-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.263736 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.267155 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.267332 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.269384 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.274201 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/9eb39773-46a3-4f31-a95a-64a183dbe417-pod-info" (OuterVolumeSpecName: "pod-info") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.275061 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-kube-api-access-72r48" (OuterVolumeSpecName: "kube-api-access-72r48") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "kube-api-access-72r48". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.291376 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.292092 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data" (OuterVolumeSpecName: "config-data") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.346064 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5b47f74bcc-6q6s6"] Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.347275 4816 scope.go:117] "RemoveContainer" containerID="3ab81f6efce6fb86362c42eda7876dd469e8113fb561b222ea56c2868f292aeb" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361118 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361151 4816 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9eb39773-46a3-4f31-a95a-64a183dbe417-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361166 4816 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9eb39773-46a3-4f31-a95a-64a183dbe417-pod-info\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361181 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361248 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72r48\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-kube-api-access-72r48\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361276 4816 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361306 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.361320 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.366187 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5b47f74bcc-6q6s6"] Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.406785 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.408711 4816 scope.go:117] "RemoveContainer" containerID="663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb" Feb 16 
13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.418533 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.431587 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-server-conf" (OuterVolumeSpecName: "server-conf") pod "9eb39773-46a3-4f31-a95a-64a183dbe417" (UID: "9eb39773-46a3-4f31-a95a-64a183dbe417"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.462768 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.462910 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7wh5\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-kube-api-access-v7wh5\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.462937 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-erlang-cookie\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.462993 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfcee51-c740-477a-87d9-558fffc58686-pod-info\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.463047 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-plugins\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.463520 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.463762 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-plugins-conf\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.463835 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-confd\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.463869 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.463919 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-server-conf\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464098 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfcee51-c740-477a-87d9-558fffc58686-erlang-cookie-secret\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464181 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-tls\") pod \"ecfcee51-c740-477a-87d9-558fffc58686\" (UID: \"ecfcee51-c740-477a-87d9-558fffc58686\") " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464326 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464786 4816 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9eb39773-46a3-4f31-a95a-64a183dbe417-server-conf\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464808 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464822 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464866 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9eb39773-46a3-4f31-a95a-64a183dbe417-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.464880 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.467973 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.469971 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecfcee51-c740-477a-87d9-558fffc58686-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.470048 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.473602 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.478207 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-kube-api-access-v7wh5" (OuterVolumeSpecName: "kube-api-access-v7wh5") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "kube-api-access-v7wh5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.482834 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ecfcee51-c740-477a-87d9-558fffc58686-pod-info" (OuterVolumeSpecName: "pod-info") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.567074 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data" (OuterVolumeSpecName: "config-data") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.569409 4816 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.569429 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.569442 4816 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfcee51-c740-477a-87d9-558fffc58686-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.569453 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.569479 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.569491 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7wh5\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-kube-api-access-v7wh5\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.569506 4816 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfcee51-c740-477a-87d9-558fffc58686-pod-info\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.602159 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.633398 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-server-conf" (OuterVolumeSpecName: "server-conf") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.670959 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ecfcee51-c740-477a-87d9-558fffc58686" (UID: "ecfcee51-c740-477a-87d9-558fffc58686"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.671342 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfcee51-c740-477a-87d9-558fffc58686-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.671374 4816 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfcee51-c740-477a-87d9-558fffc58686-server-conf\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.671385 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.733980 4816 scope.go:117] "RemoveContainer" containerID="1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.735201 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.749466 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.788323 4816 scope.go:117] "RemoveContainer" containerID="663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb" Feb 16 13:26:36 crc kubenswrapper[4816]: E0216 13:26:36.789067 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb\": container with ID starting with 663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb not found: ID does not exist" containerID="663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.789111 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb"} err="failed to get container status \"663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb\": rpc error: code = NotFound desc = could not find container \"663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb\": container with ID starting with 663a9308fbe5ebc3975af72b8a39f1c7bc92d17d1041434da5671dd5735d39fb not found: ID does not exist" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.789137 4816 scope.go:117] "RemoveContainer" containerID="1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646" Feb 16 13:26:36 crc kubenswrapper[4816]: E0216 13:26:36.789539 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646\": container with ID starting with 1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646 not found: ID does not 
exist" containerID="1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.789603 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646"} err="failed to get container status \"1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646\": rpc error: code = NotFound desc = could not find container \"1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646\": container with ID starting with 1a6f76bf38fc55cf0c7e4b05dd27dbe0f0b4e0d8e9cccb67514a0b1c39d7e646 not found: ID does not exist" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.789666 4816 scope.go:117] "RemoveContainer" containerID="98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.830880 4816 scope.go:117] "RemoveContainer" containerID="98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb" Feb 16 13:26:36 crc kubenswrapper[4816]: E0216 13:26:36.832944 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb\": container with ID starting with 98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb not found: ID does not exist" containerID="98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb" Feb 16 13:26:36 crc kubenswrapper[4816]: I0216 13:26:36.832977 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb"} err="failed to get container status \"98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb\": rpc error: code = NotFound desc = could not find container \"98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb\": container with ID starting with 98a2f34964c3d35be2ee853db1847e74fa24481e5a13eb95d3f4f1b51bc465cb not found: ID does not exist" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.012575 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-v9w6q_d9c2b7b7-d48b-41e5-9591-5b5470cfca1e/ovn-controller/0.log" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.012902 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v9w6q" Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.064959 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.065403 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.065582 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.065850 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.065948 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server" Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.067170 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.069088 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.069174 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd" Feb 16 13:26:37 crc 
kubenswrapper[4816]: I0216 13:26:37.080286 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-scripts\") pod \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.080361 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run-ovn\") pod \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.080449 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-log-ovn\") pod \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.080521 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-combined-ca-bundle\") pod \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.080567 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-ovn-controller-tls-certs\") pod \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.080649 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run\") pod \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.080703 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfh74\" (UniqueName: \"kubernetes.io/projected/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-kube-api-access-zfh74\") pod \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\" (UID: \"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.080851 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" (UID: "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.081103 4816 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.081207 4816 secret.go:188] Couldn't get secret openstack/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.081267 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data podName:911fa155-35a3-49ee-9bc0-f10a8bac544d nodeName:}" failed. No retries permitted until 2026-02-16 13:26:45.081252158 +0000 UTC m=+1404.407965886 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data") pod "nova-cell1-conductor-0" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d") : secret "nova-cell1-conductor-config-data" not found Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.081881 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run" (OuterVolumeSpecName: "var-run") pod "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" (UID: "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.082556 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-scripts" (OuterVolumeSpecName: "scripts") pod "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" (UID: "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.082571 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" (UID: "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.213503 4816 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.213530 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.213539 4816 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.232534 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-kube-api-access-zfh74" (OuterVolumeSpecName: "kube-api-access-zfh74") pod "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" (UID: "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e"). InnerVolumeSpecName "kube-api-access-zfh74". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.240908 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" (UID: "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.259489 4816 generic.go:334] "Generic (PLEG): container finished" podID="b266490b-4d0a-4463-8818-2bcdc39cdf88" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" exitCode=0 Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.259586 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b266490b-4d0a-4463-8818-2bcdc39cdf88","Type":"ContainerDied","Data":"61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437"} Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.263165 4816 generic.go:334] "Generic (PLEG): container finished" podID="7554f0b7-4174-4950-ab00-aa21ecf64b56" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663" exitCode=0 Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.263203 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7554f0b7-4174-4950-ab00-aa21ecf64b56","Type":"ContainerDied","Data":"bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663"} Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.265531 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-v9w6q_d9c2b7b7-d48b-41e5-9591-5b5470cfca1e/ovn-controller/0.log" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.265567 4816 generic.go:334] "Generic (PLEG): container finished" podID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerID="ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18" exitCode=137 Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.265610 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q" 
event={"ID":"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e","Type":"ContainerDied","Data":"ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18"} Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.265631 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v9w6q" event={"ID":"d9c2b7b7-d48b-41e5-9591-5b5470cfca1e","Type":"ContainerDied","Data":"99f7dbff425ca678b68c841d68edab0cb2552f423d0e4f16f7d69943fea9e9e3"} Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.265668 4816 scope.go:117] "RemoveContainer" containerID="ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.265842 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v9w6q" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.276953 4816 generic.go:334] "Generic (PLEG): container finished" podID="911fa155-35a3-49ee-9bc0-f10a8bac544d" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" exitCode=0 Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.277045 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"911fa155-35a3-49ee-9bc0-f10a8bac544d","Type":"ContainerDied","Data":"67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213"} Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.278731 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfcee51-c740-477a-87d9-558fffc58686","Type":"ContainerDied","Data":"0531cfe15c23253e9c798537b38b9f448a51443d71b2b9b500234d577f77b5ff"} Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.278816 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.311049 4816 scope.go:117] "RemoveContainer" containerID="ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.311079 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" (UID: "d9c2b7b7-d48b-41e5-9591-5b5470cfca1e"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: E0216 13:26:37.311772 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18\": container with ID starting with ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18 not found: ID does not exist" containerID="ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.311811 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18"} err="failed to get container status \"ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18\": rpc error: code = NotFound desc = could not find container \"ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18\": container with ID starting with ab9de7672ee2c84dcf17a056e61b597d10b5ec8bd06844ffce0b8ba6aa61ff18 not found: ID does not exist" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.311835 4816 scope.go:117] "RemoveContainer" containerID="2fdeaad8597fbc86132995a2af976c1b8f4746137b8a80f5965aad1ee988bc45" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.319618 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.319672 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.319683 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfh74\" (UniqueName: \"kubernetes.io/projected/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e-kube-api-access-zfh74\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.337449 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.347182 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.362520 4816 scope.go:117] "RemoveContainer" containerID="be23562396e8deb7c2fcd78ad08cf2775995e40b6eb695892ea897a7e1bfb880" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.383033 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.414238 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" path="/var/lib/kubelet/pods/8b7ff418-3104-4d5e-880c-bc9de7258943/volumes" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.415207 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" path="/var/lib/kubelet/pods/9eb39773-46a3-4f31-a95a-64a183dbe417/volumes" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.415970 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7e01f36-29ae-4e7d-9dfb-c91c3f860060" path="/var/lib/kubelet/pods/b7e01f36-29ae-4e7d-9dfb-c91c3f860060/volumes" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.417590 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecfcee51-c740-477a-87d9-558fffc58686" path="/var/lib/kubelet/pods/ecfcee51-c740-477a-87d9-558fffc58686/volumes" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.527614 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-combined-ca-bundle\") pod \"7554f0b7-4174-4950-ab00-aa21ecf64b56\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.527763 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-config-data\") pod \"7554f0b7-4174-4950-ab00-aa21ecf64b56\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.527825 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9qvl\" (UniqueName: \"kubernetes.io/projected/7554f0b7-4174-4950-ab00-aa21ecf64b56-kube-api-access-q9qvl\") pod \"7554f0b7-4174-4950-ab00-aa21ecf64b56\" (UID: \"7554f0b7-4174-4950-ab00-aa21ecf64b56\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.532928 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7554f0b7-4174-4950-ab00-aa21ecf64b56-kube-api-access-q9qvl" (OuterVolumeSpecName: "kube-api-access-q9qvl") pod "7554f0b7-4174-4950-ab00-aa21ecf64b56" (UID: "7554f0b7-4174-4950-ab00-aa21ecf64b56"). InnerVolumeSpecName "kube-api-access-q9qvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.552448 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7554f0b7-4174-4950-ab00-aa21ecf64b56" (UID: "7554f0b7-4174-4950-ab00-aa21ecf64b56"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.555286 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-config-data" (OuterVolumeSpecName: "config-data") pod "7554f0b7-4174-4950-ab00-aa21ecf64b56" (UID: "7554f0b7-4174-4950-ab00-aa21ecf64b56"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.588969 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5b75bc5544-lb94h" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.589119 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5b75bc5544-lb94h" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.591201 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.596280 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.612624 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-v9w6q"] Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.621119 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-v9w6q"] Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.630577 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-combined-ca-bundle\") pod \"b266490b-4d0a-4463-8818-2bcdc39cdf88\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.630724 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnr64\" (UniqueName: \"kubernetes.io/projected/b266490b-4d0a-4463-8818-2bcdc39cdf88-kube-api-access-vnr64\") pod \"b266490b-4d0a-4463-8818-2bcdc39cdf88\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.630750 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data\") pod \"911fa155-35a3-49ee-9bc0-f10a8bac544d\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.630776 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qmxh\" (UniqueName: \"kubernetes.io/projected/911fa155-35a3-49ee-9bc0-f10a8bac544d-kube-api-access-9qmxh\") pod \"911fa155-35a3-49ee-9bc0-f10a8bac544d\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.630811 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-config-data\") pod \"b266490b-4d0a-4463-8818-2bcdc39cdf88\" (UID: \"b266490b-4d0a-4463-8818-2bcdc39cdf88\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.630855 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-combined-ca-bundle\") pod \"911fa155-35a3-49ee-9bc0-f10a8bac544d\" (UID: \"911fa155-35a3-49ee-9bc0-f10a8bac544d\") " Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.631107 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.631123 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7554f0b7-4174-4950-ab00-aa21ecf64b56-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.631133 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9qvl\" (UniqueName: \"kubernetes.io/projected/7554f0b7-4174-4950-ab00-aa21ecf64b56-kube-api-access-q9qvl\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.635976 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b266490b-4d0a-4463-8818-2bcdc39cdf88-kube-api-access-vnr64" (OuterVolumeSpecName: "kube-api-access-vnr64") pod "b266490b-4d0a-4463-8818-2bcdc39cdf88" (UID: "b266490b-4d0a-4463-8818-2bcdc39cdf88"). InnerVolumeSpecName "kube-api-access-vnr64". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.639897 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/911fa155-35a3-49ee-9bc0-f10a8bac544d-kube-api-access-9qmxh" (OuterVolumeSpecName: "kube-api-access-9qmxh") pod "911fa155-35a3-49ee-9bc0-f10a8bac544d" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d"). InnerVolumeSpecName "kube-api-access-9qmxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.652410 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b266490b-4d0a-4463-8818-2bcdc39cdf88" (UID: "b266490b-4d0a-4463-8818-2bcdc39cdf88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.668129 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-config-data" (OuterVolumeSpecName: "config-data") pod "b266490b-4d0a-4463-8818-2bcdc39cdf88" (UID: "b266490b-4d0a-4463-8818-2bcdc39cdf88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.670549 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data" (OuterVolumeSpecName: "config-data") pod "911fa155-35a3-49ee-9bc0-f10a8bac544d" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.675583 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "911fa155-35a3-49ee-9bc0-f10a8bac544d" (UID: "911fa155-35a3-49ee-9bc0-f10a8bac544d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.732149 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.732180 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.732192 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnr64\" (UniqueName: \"kubernetes.io/projected/b266490b-4d0a-4463-8818-2bcdc39cdf88-kube-api-access-vnr64\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.732204 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911fa155-35a3-49ee-9bc0-f10a8bac544d-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.732215 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qmxh\" (UniqueName: \"kubernetes.io/projected/911fa155-35a3-49ee-9bc0-f10a8bac544d-kube-api-access-9qmxh\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:37 crc kubenswrapper[4816]: I0216 13:26:37.732224 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b266490b-4d0a-4463-8818-2bcdc39cdf88-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.006786 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177159 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-ceilometer-tls-certs\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177198 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-scripts\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177284 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-log-httpd\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177318 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-config-data\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177386 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltqzd\" (UniqueName: \"kubernetes.io/projected/9da68b25-a924-4d2e-82a3-c635014f32e9-kube-api-access-ltqzd\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177433 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-run-httpd\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177462 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-combined-ca-bundle\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.177476 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-sg-core-conf-yaml\") pod \"9da68b25-a924-4d2e-82a3-c635014f32e9\" (UID: \"9da68b25-a924-4d2e-82a3-c635014f32e9\") " Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.178228 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.178541 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.180950 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-scripts" (OuterVolumeSpecName: "scripts") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.181298 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9da68b25-a924-4d2e-82a3-c635014f32e9-kube-api-access-ltqzd" (OuterVolumeSpecName: "kube-api-access-ltqzd") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "kube-api-access-ltqzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.199557 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.223382 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.246514 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.278701 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.278922 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.278932 4816 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.278942 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.278951 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.278960 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltqzd\" (UniqueName: \"kubernetes.io/projected/9da68b25-a924-4d2e-82a3-c635014f32e9-kube-api-access-ltqzd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.279014 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9da68b25-a924-4d2e-82a3-c635014f32e9-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.281157 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-config-data" (OuterVolumeSpecName: "config-data") pod "9da68b25-a924-4d2e-82a3-c635014f32e9" (UID: "9da68b25-a924-4d2e-82a3-c635014f32e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.292519 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"911fa155-35a3-49ee-9bc0-f10a8bac544d","Type":"ContainerDied","Data":"fe9a20a39c017c7460f582da4b5d8a0033574c8d81f78048527e45c651561ea2"} Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.292590 4816 scope.go:117] "RemoveContainer" containerID="67049ffc6b3577941dd38268ad30ed96bbd4af82696759de96d4ad394cb67213" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.292728 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.385620 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9da68b25-a924-4d2e-82a3-c635014f32e9-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.392316 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.392315 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b266490b-4d0a-4463-8818-2bcdc39cdf88","Type":"ContainerDied","Data":"ab94b4f69abff1a90ad8843f5b4ab047033b7c787703f3006373bef714faf201"} Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.398295 4816 generic.go:334] "Generic (PLEG): container finished" podID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerID="f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08" exitCode=0 Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.398426 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.399048 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerDied","Data":"f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08"} Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.399090 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9da68b25-a924-4d2e-82a3-c635014f32e9","Type":"ContainerDied","Data":"e615eee864ce675143a21ef09bc04749ff7242fd1df9e2795e1e1b28f6b63bda"} Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.401589 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7554f0b7-4174-4950-ab00-aa21ecf64b56","Type":"ContainerDied","Data":"9d56845240197e82d29460a91a83ca0d499e76a7a52f250d63481d6050300cd4"} Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.401685 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.433140 4816 scope.go:117] "RemoveContainer" containerID="61ab90f58f8866ebfcd59d401b363793f45334f29d0d840564a7d1800d930437" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.436300 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.445181 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.463783 4816 scope.go:117] "RemoveContainer" containerID="daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.474721 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.480073 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.488240 4816 scope.go:117] "RemoveContainer" containerID="d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.491069 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.509474 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.520021 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.525500 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.527884 4816 scope.go:117] "RemoveContainer" containerID="f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.557562 4816 scope.go:117] "RemoveContainer" containerID="f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.580162 4816 scope.go:117] "RemoveContainer" containerID="daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552" Feb 16 13:26:38 crc kubenswrapper[4816]: E0216 13:26:38.580839 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552\": container with ID starting with daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552 not found: ID does not exist" containerID="daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.580876 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552"} err="failed to get container status \"daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552\": rpc error: code = NotFound desc = could not find container \"daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552\": container with ID starting with daf8bf1a377b8f8e381629539ee9cfa74c324078fadee36825681f48fa40e552 not found: ID does not exist" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.580899 4816 scope.go:117] "RemoveContainer" 
containerID="d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c" Feb 16 13:26:38 crc kubenswrapper[4816]: E0216 13:26:38.581387 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c\": container with ID starting with d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c not found: ID does not exist" containerID="d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.581414 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c"} err="failed to get container status \"d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c\": rpc error: code = NotFound desc = could not find container \"d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c\": container with ID starting with d03cd633a5e12f84c0f79e1413a88905d629426f55387c1643641d5e8e73db4c not found: ID does not exist" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.581432 4816 scope.go:117] "RemoveContainer" containerID="f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08" Feb 16 13:26:38 crc kubenswrapper[4816]: E0216 13:26:38.581711 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08\": container with ID starting with f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08 not found: ID does not exist" containerID="f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.581739 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08"} err="failed to get container status \"f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08\": rpc error: code = NotFound desc = could not find container \"f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08\": container with ID starting with f41669d69fbe9f242e9807a409cf7779675f90cce8b75509e10a37bc5b980a08 not found: ID does not exist" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.581754 4816 scope.go:117] "RemoveContainer" containerID="f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c" Feb 16 13:26:38 crc kubenswrapper[4816]: E0216 13:26:38.582066 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c\": container with ID starting with f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c not found: ID does not exist" containerID="f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c" Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.582085 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c"} err="failed to get container status \"f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c\": rpc error: code = NotFound desc = could not find container \"f6148dbda95b5ea9894d92a577c291a1f4d3e825db06331190437f07fb6d067c\": container with ID starting with 
Feb 16 13:26:38 crc kubenswrapper[4816]: I0216 13:26:38.582098 4816 scope.go:117] "RemoveContainer" containerID="bf2e2c0b74764b1a526a6efc278816c30539f542c226c270ded5241e7a95b663"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.135575 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-8ff9ccb6f-bwqh8" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.159:9696/\": dial tcp 10.217.0.159:9696: connect: connection refused"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.410268 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7554f0b7-4174-4950-ab00-aa21ecf64b56" path="/var/lib/kubelet/pods/7554f0b7-4174-4950-ab00-aa21ecf64b56/volumes"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.412877 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="911fa155-35a3-49ee-9bc0-f10a8bac544d" path="/var/lib/kubelet/pods/911fa155-35a3-49ee-9bc0-f10a8bac544d/volumes"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.414272 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" path="/var/lib/kubelet/pods/9da68b25-a924-4d2e-82a3-c635014f32e9/volumes"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.416998 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b266490b-4d0a-4463-8818-2bcdc39cdf88" path="/var/lib/kubelet/pods/b266490b-4d0a-4463-8818-2bcdc39cdf88/volumes"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.418514 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" path="/var/lib/kubelet/pods/d9c2b7b7-d48b-41e5-9591-5b5470cfca1e/volumes"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.450925 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-79lxg"]
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451325 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451343 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451352 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerName="setup-container"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451357 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerName="setup-container"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451378 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-httpd"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451387 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-httpd"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451396 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerName="rabbitmq"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451402 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerName="rabbitmq"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451412 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="openstack-network-exporter"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451418 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="openstack-network-exporter"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451426 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451432 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451439 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="ovn-northd"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451444 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="ovn-northd"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451452 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451458 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451464 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-server"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451469 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-server"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451477 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="911fa155-35a3-49ee-9bc0-f10a8bac544d" containerName="nova-cell1-conductor-conductor"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451483 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="911fa155-35a3-49ee-9bc0-f10a8bac544d" containerName="nova-cell1-conductor-conductor"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451495 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451502 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451510 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerName="glance-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451517 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerName="glance-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451530 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecfcee51-c740-477a-87d9-558fffc58686" containerName="setup-container"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451537 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecfcee51-c740-477a-87d9-558fffc58686" containerName="setup-container"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451548 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="sg-core"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451555 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="sg-core"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451565 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-central-agent"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451571 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-central-agent"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451582 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerName="mysql-bootstrap"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451588 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerName="mysql-bootstrap"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451599 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451604 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451618 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecfcee51-c740-477a-87d9-558fffc58686" containerName="rabbitmq"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451624 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecfcee51-c740-477a-87d9-558fffc58686" containerName="rabbitmq"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451634 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-httpd"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451641 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-httpd"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451671 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b266490b-4d0a-4463-8818-2bcdc39cdf88" containerName="nova-cell0-conductor-conductor"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451677 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b266490b-4d0a-4463-8818-2bcdc39cdf88" containerName="nova-cell0-conductor-conductor"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451685 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-api"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451691 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-api"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451716 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451724 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451735 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451741 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451747 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-notification-agent"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451754 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-notification-agent"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451761 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-metadata"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451767 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-metadata"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451776 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451782 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451793 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28ab0c0d-5c1e-403d-a3d9-234a5c723884" containerName="kube-state-metrics"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451800 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="28ab0c0d-5c1e-403d-a3d9-234a5c723884" containerName="kube-state-metrics"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451811 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451816 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-log"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451824 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7554f0b7-4174-4950-ab00-aa21ecf64b56" containerName="nova-scheduler-scheduler"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451832 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7554f0b7-4174-4950-ab00-aa21ecf64b56" containerName="nova-scheduler-scheduler"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451843 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="proxy-httpd"
Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451850 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="proxy-httpd"
Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451861 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller"
containerName="ovn-controller" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451867 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451876 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerName="mariadb-account-create-update" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451881 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerName="mariadb-account-create-update" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451888 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451894 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451908 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7e01f36-29ae-4e7d-9dfb-c91c3f860060" containerName="keystone-api" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451914 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7e01f36-29ae-4e7d-9dfb-c91c3f860060" containerName="keystone-api" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451922 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerName="glance-httpd" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451927 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerName="glance-httpd" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.451934 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-api" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.451940 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-api" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.453100 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerName="galera" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453113 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerName="galera" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.453127 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" containerName="memcached" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453135 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" containerName="memcached" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453315 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="28ab0c0d-5c1e-403d-a3d9-234a5c723884" containerName="kube-state-metrics" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453328 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerName="glance-httpd" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453341 4816 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api-log" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453351 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-api" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453362 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-server" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453373 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eb39773-46a3-4f31-a95a-64a183dbe417" containerName="rabbitmq" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453388 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerName="mariadb-account-create-update" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453397 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="35fffc20-e4dc-43ad-8a7f-64da2e1ceebb" containerName="memcached" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453407 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-notification-agent" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453417 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a9428a1-a54a-4e85-b898-1eac97438ba8" containerName="galera" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453424 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="911fa155-35a3-49ee-9bc0-f10a8bac544d" containerName="nova-cell1-conductor-conductor" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453438 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7554f0b7-4174-4950-ab00-aa21ecf64b56" containerName="nova-scheduler-scheduler" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453452 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-api" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453460 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7e01f36-29ae-4e7d-9dfb-c91c3f860060" containerName="keystone-api" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453472 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-log" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453487 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-metadata" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453498 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="ceilometer-central-agent" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453512 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9c2b7b7-d48b-41e5-9591-5b5470cfca1e" containerName="ovn-controller" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453525 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="ovn-northd" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453535 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1940a629-51c3-4dca-a26d-02080dabbd68" containerName="glance-httpd" Feb 
16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453542 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453551 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="sg-core" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453559 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c7838cc-1729-405a-af1e-ad0f1b9884f7" containerName="glance-log" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453569 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="585ce61c-bb97-4b2c-bea8-c55d06e6db79" containerName="barbican-worker-log" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453580 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453592 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="26c49ecf-0c54-4aa7-893f-861370b1cdbd" containerName="placement-log" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453599 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da68b25-a924-4d2e-82a3-c635014f32e9" containerName="proxy-httpd" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453608 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b7ff418-3104-4d5e-880c-bc9de7258943" containerName="openstack-network-exporter" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453618 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b266490b-4d0a-4463-8818-2bcdc39cdf88" containerName="nova-cell0-conductor-conductor" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453632 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd68bcb4-cb94-422a-b44a-7fd47d309f0a" containerName="proxy-httpd" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453644 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerName="mariadb-account-create-update" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453671 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="27fde082-22f6-49b6-9750-796875a2fe49" containerName="nova-api-log" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453680 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="975a9d1d-44d0-4b11-8a41-8f237da1ad85" containerName="cinder-api-log" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453691 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="592c5e3a-06b6-4b36-8281-c5c49051c945" containerName="barbican-api" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453704 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecfcee51-c740-477a-87d9-558fffc58686" containerName="rabbitmq" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453715 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="892fbdbb-3c72-45c9-8987-4bd9a01ddf98" containerName="nova-metadata-log" Feb 16 13:26:39 crc kubenswrapper[4816]: E0216 13:26:39.453951 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerName="mariadb-account-create-update" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.453964 4816 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="161ac06a-fdce-4a22-b21c-d9a297bf4142" containerName="mariadb-account-create-update" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.479523 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.547999 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-79lxg"] Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.604855 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-catalog-content\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.604917 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54hkz\" (UniqueName: \"kubernetes.io/projected/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-kube-api-access-54hkz\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.605019 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-utilities\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.705866 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-utilities\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.705932 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-catalog-content\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.705964 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54hkz\" (UniqueName: \"kubernetes.io/projected/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-kube-api-access-54hkz\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.706783 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-utilities\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.707059 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-catalog-content\") pod \"redhat-operators-79lxg\" (UID: 
\"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.729200 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54hkz\" (UniqueName: \"kubernetes.io/projected/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-kube-api-access-54hkz\") pod \"redhat-operators-79lxg\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") " pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:39 crc kubenswrapper[4816]: I0216 13:26:39.872129 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-79lxg" Feb 16 13:26:40 crc kubenswrapper[4816]: I0216 13:26:40.650588 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-79lxg"] Feb 16 13:26:41 crc kubenswrapper[4816]: I0216 13:26:41.436382 4816 generic.go:334] "Generic (PLEG): container finished" podID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerID="c762c9dddd8c167844d13e117df3ab2d2eaf2e7589c1a20941d53f7d9ccfa2f3" exitCode=0 Feb 16 13:26:41 crc kubenswrapper[4816]: I0216 13:26:41.436427 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-79lxg" event={"ID":"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc","Type":"ContainerDied","Data":"c762c9dddd8c167844d13e117df3ab2d2eaf2e7589c1a20941d53f7d9ccfa2f3"} Feb 16 13:26:41 crc kubenswrapper[4816]: I0216 13:26:41.436454 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-79lxg" event={"ID":"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc","Type":"ContainerStarted","Data":"7efafeb555a4553e94b33a0cfa746c7d8937aa09c6bb064fa49b59436c242e22"} Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.064975 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.066606 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.066668 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.067135 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.067175 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server" Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.068973 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.073708 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:42 crc kubenswrapper[4816]: E0216 13:26:42.073895 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd" Feb 16 13:26:42 crc kubenswrapper[4816]: I0216 13:26:42.446105 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-79lxg" event={"ID":"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc","Type":"ContainerStarted","Data":"9267d37793171e1f7565a8239d5140fb440c4fc1c97cae3245f7cb9f009be472"} Feb 16 13:26:43 crc kubenswrapper[4816]: E0216 13:26:43.395411 4816 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Feb 16 13:26:43 crc kubenswrapper[4816]: E0216 13:26:43.395505 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:59.395486416 +0000 UTC m=+1418.722200204 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-config-data" not found Feb 16 13:26:43 crc kubenswrapper[4816]: E0216 13:26:43.395416 4816 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Feb 16 13:26:43 crc kubenswrapper[4816]: E0216 13:26:43.395576 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:59.395560328 +0000 UTC m=+1418.722274056 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scripts" not found Feb 16 13:26:43 crc kubenswrapper[4816]: E0216 13:26:43.395620 4816 secret.go:188] Couldn't get secret openstack/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Feb 16 13:26:43 crc kubenswrapper[4816]: E0216 13:26:43.395803 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom podName:59470ba6-bdc1-455a-abeb-f0757dcba5f6 nodeName:}" failed. No retries permitted until 2026-02-16 13:26:59.395744483 +0000 UTC m=+1418.722458251 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom") pod "cinder-scheduler-0" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6") : secret "cinder-scheduler-config-data" not found Feb 16 13:26:43 crc kubenswrapper[4816]: I0216 13:26:43.457492 4816 generic.go:334] "Generic (PLEG): container finished" podID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerID="9267d37793171e1f7565a8239d5140fb440c4fc1c97cae3245f7cb9f009be472" exitCode=0 Feb 16 13:26:43 crc kubenswrapper[4816]: I0216 13:26:43.457550 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-79lxg" event={"ID":"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc","Type":"ContainerDied","Data":"9267d37793171e1f7565a8239d5140fb440c4fc1c97cae3245f7cb9f009be472"} Feb 16 13:26:44 crc kubenswrapper[4816]: I0216 13:26:44.481526 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-79lxg" event={"ID":"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc","Type":"ContainerStarted","Data":"b00a7c86ddba637b2b33086d579ef21f75b5e65d9184acf13ae7d2382834b246"} Feb 16 13:26:44 crc kubenswrapper[4816]: I0216 13:26:44.502486 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-79lxg" podStartSLOduration=3.117129491 podStartE2EDuration="5.50246954s" podCreationTimestamp="2026-02-16 13:26:39 +0000 UTC" firstStartedPulling="2026-02-16 13:26:41.438005562 +0000 UTC m=+1400.764719290" lastFinishedPulling="2026-02-16 13:26:43.823345611 +0000 UTC m=+1403.150059339" observedRunningTime="2026-02-16 13:26:44.499057376 +0000 UTC m=+1403.825771104" watchObservedRunningTime="2026-02-16 13:26:44.50246954 +0000 UTC m=+1403.829183268" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.038452 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8ff9ccb6f-bwqh8" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.121704 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-public-tls-certs\") pod \"4ae7c256-cd2e-4919-a488-84526307d47c\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.121775 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-ovndb-tls-certs\") pod \"4ae7c256-cd2e-4919-a488-84526307d47c\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.121802 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-config\") pod \"4ae7c256-cd2e-4919-a488-84526307d47c\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.121873 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-internal-tls-certs\") pod \"4ae7c256-cd2e-4919-a488-84526307d47c\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.121906 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-httpd-config\") pod \"4ae7c256-cd2e-4919-a488-84526307d47c\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.121969 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-combined-ca-bundle\") pod \"4ae7c256-cd2e-4919-a488-84526307d47c\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.122054 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhnpq\" (UniqueName: \"kubernetes.io/projected/4ae7c256-cd2e-4919-a488-84526307d47c-kube-api-access-mhnpq\") pod \"4ae7c256-cd2e-4919-a488-84526307d47c\" (UID: \"4ae7c256-cd2e-4919-a488-84526307d47c\") " Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.132780 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ae7c256-cd2e-4919-a488-84526307d47c-kube-api-access-mhnpq" (OuterVolumeSpecName: "kube-api-access-mhnpq") pod "4ae7c256-cd2e-4919-a488-84526307d47c" (UID: "4ae7c256-cd2e-4919-a488-84526307d47c"). InnerVolumeSpecName "kube-api-access-mhnpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.138112 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "4ae7c256-cd2e-4919-a488-84526307d47c" (UID: "4ae7c256-cd2e-4919-a488-84526307d47c"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.181447 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4ae7c256-cd2e-4919-a488-84526307d47c" (UID: "4ae7c256-cd2e-4919-a488-84526307d47c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.191486 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-config" (OuterVolumeSpecName: "config") pod "4ae7c256-cd2e-4919-a488-84526307d47c" (UID: "4ae7c256-cd2e-4919-a488-84526307d47c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.193842 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4ae7c256-cd2e-4919-a488-84526307d47c" (UID: "4ae7c256-cd2e-4919-a488-84526307d47c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.195811 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ae7c256-cd2e-4919-a488-84526307d47c" (UID: "4ae7c256-cd2e-4919-a488-84526307d47c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.208743 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "4ae7c256-cd2e-4919-a488-84526307d47c" (UID: "4ae7c256-cd2e-4919-a488-84526307d47c"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.223889 4816 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.223933 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.223944 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.223956 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhnpq\" (UniqueName: \"kubernetes.io/projected/4ae7c256-cd2e-4919-a488-84526307d47c-kube-api-access-mhnpq\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.223969 4816 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.223979 4816 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.223990 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4ae7c256-cd2e-4919-a488-84526307d47c-config\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.499392 4816 generic.go:334] "Generic (PLEG): container finished" podID="4ae7c256-cd2e-4919-a488-84526307d47c" containerID="a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41" exitCode=0 Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.499491 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.499482 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8ff9ccb6f-bwqh8" event={"ID":"4ae7c256-cd2e-4919-a488-84526307d47c","Type":"ContainerDied","Data":"a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41"}
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.499556 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8ff9ccb6f-bwqh8" event={"ID":"4ae7c256-cd2e-4919-a488-84526307d47c","Type":"ContainerDied","Data":"865c7c280a9a941bf9435d2fb5fb52ec7f41d12353c91825b170a42f294f61ec"}
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.499581 4816 scope.go:117] "RemoveContainer" containerID="4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea"
Feb 16 13:26:45 crc kubenswrapper[4816]: E0216 13:26:45.516405 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ae7c256_cd2e_4919_a488_84526307d47c.slice\": RecentStats: unable to find data in memory cache]"
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.528068 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8ff9ccb6f-bwqh8"]
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.531831 4816 scope.go:117] "RemoveContainer" containerID="a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41"
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.538084 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8ff9ccb6f-bwqh8"]
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.553751 4816 scope.go:117] "RemoveContainer" containerID="4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea"
Feb 16 13:26:45 crc kubenswrapper[4816]: E0216 13:26:45.554356 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea\": container with ID starting with 4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea not found: ID does not exist" containerID="4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea"
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.554565 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea"} err="failed to get container status \"4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea\": rpc error: code = NotFound desc = could not find container \"4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea\": container with ID starting with 4efb2bc16ad7920ead1cebe58cf9958403fc1de095c3a9214b844d535941beea not found: ID does not exist"
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.554688 4816 scope.go:117] "RemoveContainer" containerID="a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41"
Feb 16 13:26:45 crc kubenswrapper[4816]: E0216 13:26:45.555062 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41\": container with ID starting with a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41 not found: ID does not exist" containerID="a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41"
Feb 16 13:26:45 crc kubenswrapper[4816]: I0216 13:26:45.555139 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41"} err="failed to get container status \"a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41\": rpc error: code = NotFound desc = could not find container \"a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41\": container with ID starting with a14ab708906df8832635cd1a03dd5f590e369b77a6fc5f24dbccea35599f6b41 not found: ID does not exist"
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.063712 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.064208 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.064568 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.064595 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server"
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.064948 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.066905 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.068173 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 16 13:26:47 crc kubenswrapper[4816]: E0216 13:26:47.068216 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd"
Feb 16 13:26:47 crc kubenswrapper[4816]: I0216 13:26:47.408900 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" path="/var/lib/kubelet/pods/4ae7c256-cd2e-4919-a488-84526307d47c/volumes"
Feb 16 13:26:49 crc kubenswrapper[4816]: I0216 13:26:49.872420 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-79lxg"
Feb 16 13:26:49 crc kubenswrapper[4816]: I0216 13:26:49.873270 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-79lxg"
Feb 16 13:26:49 crc kubenswrapper[4816]: I0216 13:26:49.917973 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-79lxg"
Feb 16 13:26:50 crc kubenswrapper[4816]: I0216 13:26:50.616891 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-79lxg"
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.064389 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.065144 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.065498 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.065553 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server"
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.067123 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.069040 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.070493 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Feb 16 13:26:52 crc kubenswrapper[4816]: E0216 13:26:52.070530 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd"
Feb 16 13:26:54 crc kubenswrapper[4816]: I0216 13:26:54.042868 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-79lxg"]
Feb 16 13:26:54 crc kubenswrapper[4816]: I0216 13:26:54.043448 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-79lxg" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="registry-server" containerID="cri-o://b00a7c86ddba637b2b33086d579ef21f75b5e65d9184acf13ae7d2382834b246" gracePeriod=2
Feb 16 13:26:54 crc kubenswrapper[4816]: I0216 13:26:54.606911 4816 generic.go:334] "Generic (PLEG): container finished" podID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerID="b00a7c86ddba637b2b33086d579ef21f75b5e65d9184acf13ae7d2382834b246" exitCode=0
Feb 16 13:26:54 crc kubenswrapper[4816]: I0216 13:26:54.607333 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-79lxg" event={"ID":"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc","Type":"ContainerDied","Data":"b00a7c86ddba637b2b33086d579ef21f75b5e65d9184acf13ae7d2382834b246"}
Feb 16 13:26:54 crc kubenswrapper[4816]: I0216 13:26:54.972217 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-79lxg"
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.076760 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-utilities\") pod \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") "
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.076820 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-catalog-content\") pod \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") "
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.076874 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54hkz\" (UniqueName: \"kubernetes.io/projected/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-kube-api-access-54hkz\") pod \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\" (UID: \"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc\") "
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.077709 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-utilities" (OuterVolumeSpecName: "utilities") pod "5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" (UID: "5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.090911 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-kube-api-access-54hkz" (OuterVolumeSpecName: "kube-api-access-54hkz") pod "5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" (UID: "5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc"). InnerVolumeSpecName "kube-api-access-54hkz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.178785 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.178821 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54hkz\" (UniqueName: \"kubernetes.io/projected/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-kube-api-access-54hkz\") on node \"crc\" DevicePath \"\""
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.209343 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" (UID: "5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.280443 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.615687 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-79lxg" event={"ID":"5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc","Type":"ContainerDied","Data":"7efafeb555a4553e94b33a0cfa746c7d8937aa09c6bb064fa49b59436c242e22"}
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.615734 4816 scope.go:117] "RemoveContainer" containerID="b00a7c86ddba637b2b33086d579ef21f75b5e65d9184acf13ae7d2382834b246"
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.615854 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-79lxg"
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.641751 4816 scope.go:117] "RemoveContainer" containerID="9267d37793171e1f7565a8239d5140fb440c4fc1c97cae3245f7cb9f009be472"
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.642544 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-79lxg"]
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.648113 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-79lxg"]
Feb 16 13:26:55 crc kubenswrapper[4816]: I0216 13:26:55.668457 4816 scope.go:117] "RemoveContainer" containerID="c762c9dddd8c167844d13e117df3ab2d2eaf2e7589c1a20941d53f7d9ccfa2f3"
Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.064447 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.065020 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.065419 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.065455 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server"
podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server" Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.066254 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.067785 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.069145 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Feb 16 13:26:57 crc kubenswrapper[4816]: E0216 13:26:57.069201 4816 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-rfd9r" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd" Feb 16 13:26:57 crc kubenswrapper[4816]: I0216 13:26:57.409281 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" path="/var/lib/kubelet/pods/5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc/volumes" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.307238 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.323379 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.423325 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") pod \"809ff1b2-f365-4513-89a1-aed781f4b4aa\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.423495 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-combined-ca-bundle\") pod \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.423700 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59470ba6-bdc1-455a-abeb-f0757dcba5f6-etc-machine-id\") pod \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.423831 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom\") pod \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.424523 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts\") pod \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.424709 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"809ff1b2-f365-4513-89a1-aed781f4b4aa\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.424818 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-lock\") pod \"809ff1b2-f365-4513-89a1-aed781f4b4aa\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.424912 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8nw8\" (UniqueName: \"kubernetes.io/projected/59470ba6-bdc1-455a-abeb-f0757dcba5f6-kube-api-access-q8nw8\") pod \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.425020 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data\") pod \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\" (UID: \"59470ba6-bdc1-455a-abeb-f0757dcba5f6\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.425147 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809ff1b2-f365-4513-89a1-aed781f4b4aa-combined-ca-bundle\") pod \"809ff1b2-f365-4513-89a1-aed781f4b4aa\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.425248 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tw2z\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-kube-api-access-7tw2z\") pod \"809ff1b2-f365-4513-89a1-aed781f4b4aa\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.425353 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-cache\") pod \"809ff1b2-f365-4513-89a1-aed781f4b4aa\" (UID: \"809ff1b2-f365-4513-89a1-aed781f4b4aa\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.423750 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/59470ba6-bdc1-455a-abeb-f0757dcba5f6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "59470ba6-bdc1-455a-abeb-f0757dcba5f6" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.425520 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-lock" (OuterVolumeSpecName: "lock") pod "809ff1b2-f365-4513-89a1-aed781f4b4aa" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.425846 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-cache" (OuterVolumeSpecName: "cache") pod "809ff1b2-f365-4513-89a1-aed781f4b4aa" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.429624 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "809ff1b2-f365-4513-89a1-aed781f4b4aa" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.429746 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "swift") pod "809ff1b2-f365-4513-89a1-aed781f4b4aa" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.430099 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "59470ba6-bdc1-455a-abeb-f0757dcba5f6" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.430315 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts" (OuterVolumeSpecName: "scripts") pod "59470ba6-bdc1-455a-abeb-f0757dcba5f6" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.431318 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-kube-api-access-7tw2z" (OuterVolumeSpecName: "kube-api-access-7tw2z") pod "809ff1b2-f365-4513-89a1-aed781f4b4aa" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa"). InnerVolumeSpecName "kube-api-access-7tw2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.432801 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59470ba6-bdc1-455a-abeb-f0757dcba5f6-kube-api-access-q8nw8" (OuterVolumeSpecName: "kube-api-access-q8nw8") pod "59470ba6-bdc1-455a-abeb-f0757dcba5f6" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6"). InnerVolumeSpecName "kube-api-access-q8nw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.463212 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59470ba6-bdc1-455a-abeb-f0757dcba5f6" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.513852 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data" (OuterVolumeSpecName: "config-data") pod "59470ba6-bdc1-455a-abeb-f0757dcba5f6" (UID: "59470ba6-bdc1-455a-abeb-f0757dcba5f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527108 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527141 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527153 4816 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-lock\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527163 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8nw8\" (UniqueName: \"kubernetes.io/projected/59470ba6-bdc1-455a-abeb-f0757dcba5f6-kube-api-access-q8nw8\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527172 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527181 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tw2z\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-kube-api-access-7tw2z\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527189 4816 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/809ff1b2-f365-4513-89a1-aed781f4b4aa-cache\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527213 4816 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/809ff1b2-f365-4513-89a1-aed781f4b4aa-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527222 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527230 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/59470ba6-bdc1-455a-abeb-f0757dcba5f6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.527240 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/59470ba6-bdc1-455a-abeb-f0757dcba5f6-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.542635 4816 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.666535 4816 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.674210 4816 generic.go:334] "Generic (PLEG): container finished" 
podID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerID="18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807" exitCode=137 Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.674290 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"59470ba6-bdc1-455a-abeb-f0757dcba5f6","Type":"ContainerDied","Data":"18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807"} Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.674335 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"59470ba6-bdc1-455a-abeb-f0757dcba5f6","Type":"ContainerDied","Data":"862d6bf0589f74958a908ba0bad11a5aa60dbe4a8a939cf373a7917129d0e4a6"} Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.674355 4816 scope.go:117] "RemoveContainer" containerID="934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.674502 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.682914 4816 generic.go:334] "Generic (PLEG): container finished" podID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerID="b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591" exitCode=137 Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.682975 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591"} Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.683002 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"809ff1b2-f365-4513-89a1-aed781f4b4aa","Type":"ContainerDied","Data":"b9ba6f9e177ade30a6e030412df1bef59d77c7d7e14b218e8a5db7aa6b066631"} Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.683099 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.687843 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rfd9r_f4ac4b8a-a945-4f89-9ae4-933ab04dce2b/ovs-vswitchd/0.log" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.692255 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c" exitCode=137 Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.692293 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rfd9r" event={"ID":"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b","Type":"ContainerDied","Data":"7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c"} Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.719479 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.720244 4816 scope.go:117] "RemoveContainer" containerID="18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.724730 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.738299 4816 scope.go:117] "RemoveContainer" containerID="934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e" Feb 16 13:26:58 crc kubenswrapper[4816]: E0216 13:26:58.738786 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e\": container with ID starting with 934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e not found: ID does not exist" containerID="934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.738822 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e"} err="failed to get container status \"934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e\": rpc error: code = NotFound desc = could not find container \"934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e\": container with ID starting with 934ee35b686f15ef55d5cfc88112808fc9be563d8d8342ea511dd4f66266fd2e not found: ID does not exist" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.738845 4816 scope.go:117] "RemoveContainer" containerID="18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807" Feb 16 13:26:58 crc kubenswrapper[4816]: E0216 13:26:58.739185 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807\": container with ID starting with 18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807 not found: ID does not exist" containerID="18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807" Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.739230 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807"} err="failed to get container status \"18f865059083ef6e626f1263cc5ef522110c4187363c95d1487cf4b22cbf1807\": rpc 
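The three containers above exit with code 137, the shell convention of 128 + signal number: 137 - 128 = 9, i.e. SIGKILL, so they were killed when their termination grace period ran out rather than exiting on their own (compare exitCode=0 for registry-server earlier, which honored its stop signal). In Go:

package main

import "fmt"

// signalFromExit decodes the 128+N convention used for containers killed
// by a signal; codes at or below 128 are plain exit statuses.
func signalFromExit(code int) int {
	if code > 128 {
		return code - 128
	}
	return 0 // no signal involved
}

func main() {
	fmt.Println(signalFromExit(137)) // 9 == SIGKILL, as for the entries above
	fmt.Println(signalFromExit(0))   // normal exit, as for registry-server
}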
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.739262 4816 scope.go:117] "RemoveContainer" containerID="b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.761154 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/809ff1b2-f365-4513-89a1-aed781f4b4aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "809ff1b2-f365-4513-89a1-aed781f4b4aa" (UID: "809ff1b2-f365-4513-89a1-aed781f4b4aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.767508 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809ff1b2-f365-4513-89a1-aed781f4b4aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.781986 4816 scope.go:117] "RemoveContainer" containerID="2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.802644 4816 scope.go:117] "RemoveContainer" containerID="decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.820618 4816 scope.go:117] "RemoveContainer" containerID="64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.844425 4816 scope.go:117] "RemoveContainer" containerID="688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.857925 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rfd9r_f4ac4b8a-a945-4f89-9ae4-933ab04dce2b/ovs-vswitchd/0.log"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.858524 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rfd9r"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.866557 4816 scope.go:117] "RemoveContainer" containerID="5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.911123 4816 scope.go:117] "RemoveContainer" containerID="403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982193 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzwqc\" (UniqueName: \"kubernetes.io/projected/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-kube-api-access-jzwqc\") pod \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982282 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-etc-ovs\") pod \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982314 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-run\") pod \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982350 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-lib\") pod \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982420 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-scripts\") pod \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982466 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-log\") pod \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\" (UID: \"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b\") "
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982527 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-run" (OuterVolumeSpecName: "var-run") pod "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" (UID: "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982860 4816 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-run\") on node \"crc\" DevicePath \"\""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982914 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-log" (OuterVolumeSpecName: "var-log") pod "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" (UID: "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.982945 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-lib" (OuterVolumeSpecName: "var-lib") pod "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" (UID: "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.983756 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" (UID: "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.987881 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-kube-api-access-jzwqc" (OuterVolumeSpecName: "kube-api-access-jzwqc") pod "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" (UID: "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b"). InnerVolumeSpecName "kube-api-access-jzwqc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.994854 4816 scope.go:117] "RemoveContainer" containerID="bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66"
Feb 16 13:26:58 crc kubenswrapper[4816]: I0216 13:26:58.995368 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-scripts" (OuterVolumeSpecName: "scripts") pod "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" (UID: "f4ac4b8a-a945-4f89-9ae4-933ab04dce2b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.034184 4816 scope.go:117] "RemoveContainer" containerID="f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.039009 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.044739 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.079402 4816 scope.go:117] "RemoveContainer" containerID="227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.084799 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzwqc\" (UniqueName: \"kubernetes.io/projected/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-kube-api-access-jzwqc\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.084880 4816 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-etc-ovs\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.084895 4816 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-lib\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.084927 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.084939 4816 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b-var-log\") on node \"crc\" DevicePath \"\"" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.100686 4816 scope.go:117] "RemoveContainer" containerID="588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.119070 4816 scope.go:117] "RemoveContainer" containerID="8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.136154 4816 scope.go:117] "RemoveContainer" containerID="028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.156853 4816 scope.go:117] "RemoveContainer" containerID="a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.171474 4816 scope.go:117] "RemoveContainer" containerID="f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.187910 4816 scope.go:117] "RemoveContainer" containerID="b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.188368 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591\": container with ID starting with b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591 not found: ID does not exist" containerID="b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591" 
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.188399 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591"} err="failed to get container status \"b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591\": rpc error: code = NotFound desc = could not find container \"b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591\": container with ID starting with b20c240feeef68787633cd4a561a34b85b3e99b1bf1b71fbcf6cf0188cec6591 not found: ID does not exist"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.188422 4816 scope.go:117] "RemoveContainer" containerID="2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f"
Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.188711 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f\": container with ID starting with 2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f not found: ID does not exist" containerID="2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.188772 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f"} err="failed to get container status \"2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f\": rpc error: code = NotFound desc = could not find container \"2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f\": container with ID starting with 2848dc1fa21f629588a82ca83fb7fe344a419a3d9a5b63575485560995acfc1f not found: ID does not exist"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.188801 4816 scope.go:117] "RemoveContainer" containerID="decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837"
Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.189104 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837\": container with ID starting with decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837 not found: ID does not exist" containerID="decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.189130 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837"} err="failed to get container status \"decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837\": rpc error: code = NotFound desc = could not find container \"decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837\": container with ID starting with decb8a7fe14959a2478f81de76fa3fd252afb4e4a1303720c5cfebef57c70837 not found: ID does not exist"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.189148 4816 scope.go:117] "RemoveContainer" containerID="64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856"
Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.189373 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856\": container with ID starting with 64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856 not found: ID does not exist" containerID="64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.189414 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856"} err="failed to get container status \"64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856\": rpc error: code = NotFound desc = could not find container \"64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856\": container with ID starting with 64854e6791bce8007e29cb7e5d05ef1b8cb29208679681db9db7efcd384fa856 not found: ID does not exist"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.189430 4816 scope.go:117] "RemoveContainer" containerID="688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c"
Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.189700 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c\": container with ID starting with 688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c not found: ID does not exist" containerID="688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.189725 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c"} err="failed to get container status \"688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c\": rpc error: code = NotFound desc = could not find container \"688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c\": container with ID starting with 688d8b9c4452f7c003138dfa816adc2b2843d345cdf32880ac8d5222dc2e474c not found: ID does not exist"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.189739 4816 scope.go:117] "RemoveContainer" containerID="5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5"
Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.189983 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5\": container with ID starting with 5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5 not found: ID does not exist" containerID="5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.190013 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5"} err="failed to get container status \"5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5\": rpc error: code = NotFound desc = could not find container \"5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5\": container with ID starting with 5d55f10059b3b43f82f6ac596dc3051b1eaf91d46d7d871e35ba358241d005b5 not found: ID does not exist"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.190029 4816 scope.go:117] "RemoveContainer" containerID="403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71"
Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.190260 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71\": container with ID starting with 403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71 not found: ID does not exist" containerID="403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71"
error: code = NotFound desc = could not find container \"403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71\": container with ID starting with 403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71 not found: ID does not exist" containerID="403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.190285 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71"} err="failed to get container status \"403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71\": rpc error: code = NotFound desc = could not find container \"403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71\": container with ID starting with 403154c29fa7f1ad284a17a1c1092d6f320e6c4b8248c6188a56a4e50d20fe71 not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.190299 4816 scope.go:117] "RemoveContainer" containerID="bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.190612 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66\": container with ID starting with bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66 not found: ID does not exist" containerID="bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.190683 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66"} err="failed to get container status \"bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66\": rpc error: code = NotFound desc = could not find container \"bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66\": container with ID starting with bf72681dd18ca8d3e476456a6672540c16863f7daccb2d3ca8878dced5e30e66 not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.190728 4816 scope.go:117] "RemoveContainer" containerID="f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.191055 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363\": container with ID starting with f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363 not found: ID does not exist" containerID="f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.191122 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363"} err="failed to get container status \"f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363\": rpc error: code = NotFound desc = could not find container \"f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363\": container with ID starting with f955f7d89c9af03a19708daeefe522bfbc1a8ae8735254f2a9cb5e7d983ee363 not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.191150 4816 scope.go:117] "RemoveContainer" 
containerID="227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.191436 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad\": container with ID starting with 227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad not found: ID does not exist" containerID="227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.191465 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad"} err="failed to get container status \"227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad\": rpc error: code = NotFound desc = could not find container \"227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad\": container with ID starting with 227ecfc1d49fa1f6a075a33815b4c9f17eaaf09937050182af5d38bc817152ad not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.191485 4816 scope.go:117] "RemoveContainer" containerID="588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.191723 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df\": container with ID starting with 588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df not found: ID does not exist" containerID="588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.191753 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df"} err="failed to get container status \"588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df\": rpc error: code = NotFound desc = could not find container \"588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df\": container with ID starting with 588055b805adebd03f8f35ea3f93fc4a27665a06aa1818d89e48f1dfe10e41df not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.191776 4816 scope.go:117] "RemoveContainer" containerID="8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.192005 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958\": container with ID starting with 8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958 not found: ID does not exist" containerID="8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.192040 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958"} err="failed to get container status \"8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958\": rpc error: code = NotFound desc = could not find container \"8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958\": container with ID starting with 
8ca6ba2339d0d7b9de7b195e9222a48008d61600dc632c7546cb47ca02614958 not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.192060 4816 scope.go:117] "RemoveContainer" containerID="028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.192300 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055\": container with ID starting with 028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055 not found: ID does not exist" containerID="028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.192333 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055"} err="failed to get container status \"028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055\": rpc error: code = NotFound desc = could not find container \"028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055\": container with ID starting with 028aacdd446faeba8e2da6eb0aea32145fe566c5d692551df0936ea3406cb055 not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.192357 4816 scope.go:117] "RemoveContainer" containerID="a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.192565 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0\": container with ID starting with a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0 not found: ID does not exist" containerID="a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.192595 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0"} err="failed to get container status \"a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0\": rpc error: code = NotFound desc = could not find container \"a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0\": container with ID starting with a261366b582510a2d102a4527d090f458eb4738b37926180369680e23b96ddf0 not found: ID does not exist" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.192615 4816 scope.go:117] "RemoveContainer" containerID="f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba" Feb 16 13:26:59 crc kubenswrapper[4816]: E0216 13:26:59.192863 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba\": container with ID starting with f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba not found: ID does not exist" containerID="f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba" Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.192896 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba"} err="failed to get container status \"f936e7e25244208136c5a02749c165bb9f5c2e751219d0ab837f1be095be07ba\": rpc 
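The RemoveContainer / ContainerStatus-NotFound / "DeleteContainer returned error" triplets above show the kubelet retrying cleanup of containers the runtime has already forgotten: it first asks CRI-O for the container's status, and a NotFound answer means another cleanup path already removed it. A minimal sketch of that idempotent-delete pattern follows (the fakeRuntime type is hypothetical; the real kubelet goes through the CRI client, and this is only the shape of the idea):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// fakeRuntime stands in for the CRI runtime service; only IDs in alive exist.
type fakeRuntime struct{ alive map[string]bool }

func (r *fakeRuntime) ContainerStatus(id string) error {
	if !r.alive[id] {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	return nil
}

func (r *fakeRuntime) RemoveContainer(id string) error {
	delete(r.alive, id)
	return nil
}

// removeIfPresent treats NotFound as "already gone" rather than a failure,
// which is why the E-level records above are followed by normal I-level ones.
func removeIfPresent(r *fakeRuntime, id string) error {
	if err := r.ContainerStatus(id); err != nil {
		if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
			return nil // another cleanup path already removed it; nothing to do
		}
		return fmt.Errorf("failed to get container status %q: %w", id, err)
	}
	return r.RemoveContainer(id)
}

func main() {
	rt := &fakeRuntime{alive: map[string]bool{"abc123": true}}
	fmt.Println(removeIfPresent(rt, "abc123")) // <nil>: removed
	fmt.Println(removeIfPresent(rt, "abc123")) // <nil>: already gone, not an error
}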
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.414770 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" path="/var/lib/kubelet/pods/59470ba6-bdc1-455a-abeb-f0757dcba5f6/volumes"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.415775 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" path="/var/lib/kubelet/pods/809ff1b2-f365-4513-89a1-aed781f4b4aa/volumes"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.709396 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rfd9r_f4ac4b8a-a945-4f89-9ae4-933ab04dce2b/ovs-vswitchd/0.log"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.710208 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rfd9r" event={"ID":"f4ac4b8a-a945-4f89-9ae4-933ab04dce2b","Type":"ContainerDied","Data":"8790e8895d3624d6c28689d4fe219054db76d82b207424cf0fd1b1cb8276b9ff"}
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.710267 4816 scope.go:117] "RemoveContainer" containerID="7271caccd1d2eb31d31cb39f1cf17df9bb5ccb2ec067592ae39670e121d1189c"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.710410 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rfd9r"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.737589 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-rfd9r"]
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.741211 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-rfd9r"]
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.743681 4816 scope.go:117] "RemoveContainer" containerID="8fcc4ea78e23490b49cfc9e8f3be0856415a850715bb39db460113a5d9fbd744"
Feb 16 13:26:59 crc kubenswrapper[4816]: I0216 13:26:59.768150 4816 scope.go:117] "RemoveContainer" containerID="5ab245e75ae4f94ade8ab4cfddcda099b8ce7f43e1b4902b3fb5af79e51718b4"
Feb 16 13:27:01 crc kubenswrapper[4816]: I0216 13:27:01.421701 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" path="/var/lib/kubelet/pods/f4ac4b8a-a945-4f89-9ae4-933ab04dce2b/volumes"
Feb 16 13:27:03 crc kubenswrapper[4816]: I0216 13:27:03.008792 4816 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podddbe806c-23ac-4f2f-87e1-be1ec2189c87"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podddbe806c-23ac-4f2f-87e1-be1ec2189c87] : Timed out while waiting for systemd to remove kubepods-besteffort-podddbe806c_23ac_4f2f_87e1_be1ec2189c87.slice"
Feb 16 13:27:03 crc kubenswrapper[4816]: I0216 13:27:03.112548 4816 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","poddeed1b26-cbe9-476b-8cc3-9898c6ad929f"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort poddeed1b26-cbe9-476b-8cc3-9898c6ad929f] : Timed out while waiting for systemd to remove kubepods-besteffort-poddeed1b26_cbe9_476b_8cc3_9898c6ad929f.slice"
Feb 16 13:27:03 crc kubenswrapper[4816]: E0216 13:27:03.112601 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort poddeed1b26-cbe9-476b-8cc3-9898c6ad929f] : unable to destroy cgroup paths for cgroup [kubepods besteffort poddeed1b26-cbe9-476b-8cc3-9898c6ad929f] : Timed out while waiting for systemd to remove kubepods-besteffort-poddeed1b26_cbe9_476b_8cc3_9898c6ad929f.slice" pod="openstack/placement-6847-account-create-update-vdkv9" podUID="deed1b26-cbe9-476b-8cc3-9898c6ad929f"
Feb 16 13:27:03 crc kubenswrapper[4816]: I0216 13:27:03.746587 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6847-account-create-update-vdkv9"
Feb 16 13:27:03 crc kubenswrapper[4816]: I0216 13:27:03.785113 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6847-account-create-update-vdkv9"]
Feb 16 13:27:03 crc kubenswrapper[4816]: I0216 13:27:03.791474 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-6847-account-create-update-vdkv9"]
Feb 16 13:27:05 crc kubenswrapper[4816]: I0216 13:27:05.409048 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deed1b26-cbe9-476b-8cc3-9898c6ad929f" path="/var/lib/kubelet/pods/deed1b26-cbe9-476b-8cc3-9898c6ad929f/volumes"
Feb 16 13:27:06 crc kubenswrapper[4816]: I0216 13:27:06.940995 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 13:27:06 crc kubenswrapper[4816]: I0216 13:27:06.941084 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 13:27:36 crc kubenswrapper[4816]: I0216 13:27:36.941357 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 13:27:36 crc kubenswrapper[4816]: I0216 13:27:36.942250 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 13:28:06 crc kubenswrapper[4816]: I0216 13:28:06.941151 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 13:28:06 crc kubenswrapper[4816]: I0216 13:28:06.942733 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
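Three Liveness failures are logged for machine-config-daemon-f95nc, 30 seconds apart (13:27:06, 13:27:36, 13:28:06), before the kubelet acts on them; that spacing suggests a 30s probe period, and acting only on the third consecutive failure matches the Kubernetes default failureThreshold of 3. A rough, illustrative probe loop under those assumed settings (not kubelet code) might look like:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP liveness check; a refused connection, as in the
// records above, simply counts as a failure.
func probeOnce(url string) bool {
	resp, err := http.Get(url)
	if err != nil {
		return false // e.g. "connect: connection refused" while the process is down
	}
	defer resp.Body.Close()
	return resp.StatusCode >= 200 && resp.StatusCode < 400
}

func main() {
	const (
		url              = "http://127.0.0.1:8798/health" // endpoint from the log
		period           = 30 * time.Second               // failures are 30s apart
		failureThreshold = 3                              // Kubernetes default
	)
	failures := 0
	for {
		if probeOnce(url) {
			failures = 0 // any success resets the consecutive-failure count
		} else if failures++; failures >= failureThreshold {
			fmt.Println("liveness failed; container will be restarted")
			return
		}
		time.Sleep(period)
	}
}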
Feb 16 13:28:06 crc kubenswrapper[4816]: I0216 13:28:06.942896 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc"
Feb 16 13:28:06 crc kubenswrapper[4816]: I0216 13:28:06.943666 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"95aa8a6a6708ed2afc1dbaedfa775efb1b58e3f80a0695507163b402563b9cf2"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 16 13:28:06 crc kubenswrapper[4816]: I0216 13:28:06.943825 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://95aa8a6a6708ed2afc1dbaedfa775efb1b58e3f80a0695507163b402563b9cf2" gracePeriod=600
Feb 16 13:28:07 crc kubenswrapper[4816]: I0216 13:28:07.691876 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="95aa8a6a6708ed2afc1dbaedfa775efb1b58e3f80a0695507163b402563b9cf2" exitCode=0
Feb 16 13:28:07 crc kubenswrapper[4816]: I0216 13:28:07.691943 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"95aa8a6a6708ed2afc1dbaedfa775efb1b58e3f80a0695507163b402563b9cf2"}
Feb 16 13:28:07 crc kubenswrapper[4816]: I0216 13:28:07.692384 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2"}
Feb 16 13:28:07 crc kubenswrapper[4816]: I0216 13:28:07.692408 4816 scope.go:117] "RemoveContainer" containerID="a16107fccce9c93e96a6d43d25ee1381b11a663b98df0e1296331b66fbfb375f"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.436418 4816 scope.go:117] "RemoveContainer" containerID="c983d929214d9a1e3bd142f2e8f7bf5e969c9d077da70020eb59bde75ca3eb44"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.475767 4816 scope.go:117] "RemoveContainer" containerID="8c2d4de596683493b52e68cd5bdff78061eb50a65a2f9b18774d741c4b454147"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.525914 4816 scope.go:117] "RemoveContainer" containerID="a69d3b00643ccd3897caf83c0b51bd02143f4dc6c41e62039441a9238ec6fb07"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.560732 4816 scope.go:117] "RemoveContainer" containerID="c6f647e6f8e63892a1f0767746c765816e5f687f2317d30098382a81bbb1331b"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.598601 4816 scope.go:117] "RemoveContainer" containerID="feea9035d59c55445bcca93b4b1d6b7b8ed4d1f3147f21bda9862dd9c1e1b9f5"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.638844 4816 scope.go:117] "RemoveContainer" containerID="1307433013f8fc69cb64509a7a0a11818f2c756bea9194bef6578614e0d17a73"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.680852 4816 scope.go:117] "RemoveContainer" containerID="004c0da5fb3afa4a342ed9b071e640e332b3652bacb749a155ade0c4ff13924a"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.708608 4816 scope.go:117] "RemoveContainer" containerID="e63f76aba075ce81a3ca07eb9f040953dc5a9cadaac3012c7e2ddbbae73f9f45"
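After the third failure the container is killed with gracePeriod=600 and restarted: the PLEG records show the old container dying with exitCode=0 (a clean shutdown on SIGTERM) and a replacement starting a second later. A plain-process sketch of a grace-period kill, SIGTERM first and SIGKILL only on timeout, is below; the kubelet's real path is the CRI StopContainer call, so this is only an illustration:

package main

import (
	"os/exec"
	"syscall"
	"time"
)

// killWithGracePeriod asks the process to exit, then force-kills it if the
// grace period expires first.
func killWithGracePeriod(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite request; handlers may flush state
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited on its own, like exitCode=0 in the log
	case <-time.After(grace):
		return cmd.Process.Kill() // grace period expired; force SIGKILL
	}
}

func main() {
	cmd := exec.Command("sleep", "3600")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	// The log used gracePeriod=600 (seconds); a short grace keeps the demo quick.
	_ = killWithGracePeriod(cmd, 2*time.Second)
}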
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.732172 4816 scope.go:117] "RemoveContainer" containerID="1b3d6bc3eedad7ccfb9c38488642bb42e53f8190c7302116b2a8895960a5652d"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.765232 4816 scope.go:117] "RemoveContainer" containerID="bcea712aa54e81de52474aa92b168797ba2d5c183cbb8b20e136d9c44d98a7ca"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.790621 4816 scope.go:117] "RemoveContainer" containerID="581ca028c0ca32ee280e69406751f61f447512e6984f0d2dbcc8b22666a5614b"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.820810 4816 scope.go:117] "RemoveContainer" containerID="c0d25ba920e734873c0067159aea21f12858190b0e863e919da687bb7caef5c0"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.875552 4816 scope.go:117] "RemoveContainer" containerID="4bf380764ceb5ddaf487fb4521c9455b43fabb3c7c18c66b33792776e7c2f38e"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.898416 4816 scope.go:117] "RemoveContainer" containerID="0c593b579f73184d42db2241d7d922c67c9bac01382ab293a7dabf842695c8bf"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.920518 4816 scope.go:117] "RemoveContainer" containerID="9401e9bc0e7d30596699b27b35b8c23305237b7533a1ac0a6c3b64a65f9f4905"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.939297 4816 scope.go:117] "RemoveContainer" containerID="80db716e3daf7dccdb960d0101521240725a4a2754c21a25fd3fae9a938eb708"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.961386 4816 scope.go:117] "RemoveContainer" containerID="a52319a3a18c673b7ada74018de88a1b6d2bfa72422d9eec639505e94d2a6b88"
Feb 16 13:28:23 crc kubenswrapper[4816]: I0216 13:28:23.981295 4816 scope.go:117] "RemoveContainer" containerID="7ca1dd77f7cc1f4b3821489cc3c35cd369aec47735907be42c9c819ae55d1672"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.174377 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7shf9"]
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175316 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-updater"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175339 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-updater"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175373 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server-init"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175386 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server-init"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175402 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-reaper"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175414 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-reaper"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175428 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175454 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175469 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="extract-content"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175481 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="extract-content"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175504 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="swift-recon-cron"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175517 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="swift-recon-cron"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175543 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="rsync"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175555 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="rsync"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175576 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175588 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175612 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.175623 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.175646 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-updater"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178626 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-updater"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178716 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178731 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178772 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-api"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178785 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-api"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178809 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-httpd"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178821 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-httpd"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178839 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178850 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178873 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-expirer"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178885 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-expirer"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178904 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178916 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178937 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="probe"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178950 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="probe"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.178973 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="cinder-scheduler"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.178985 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="cinder-scheduler"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.179000 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179012 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.179024 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179038 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.179060 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="extract-utilities"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179073 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="extract-utilities"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.179098 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179110 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.179131 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179144 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.179164 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="registry-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179176 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="registry-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: E0216 13:28:51.179195 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179208 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179437 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cfee876-06e8-49dd-8bc4-5a3c1e25a3cc" containerName="registry-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179465 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-updater"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179481 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179497 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179518 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="swift-recon-cron"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179536 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-api"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179558 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovs-vswitchd"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179572 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179592 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-updater"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179610 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-expirer"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179625 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-reaper"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179648 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179690 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4ac4b8a-a945-4f89-9ae4-933ab04dce2b" containerName="ovsdb-server"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179710 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179728 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="cinder-scheduler"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179746 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="59470ba6-bdc1-455a-abeb-f0757dcba5f6" containerName="probe"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179770 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="object-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179782 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="rsync"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179798 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ae7c256-cd2e-4919-a488-84526307d47c" containerName="neutron-httpd"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179821 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-replicator"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179834 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="account-auditor"
Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.179852 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="809ff1b2-f365-4513-89a1-aed781f4b4aa" containerName="container-server"
Need to start a new one" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.196114 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7shf9"] Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.273846 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-catalog-content\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.273896 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r56dw\" (UniqueName: \"kubernetes.io/projected/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-kube-api-access-r56dw\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.273912 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-utilities\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.375312 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-catalog-content\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.375417 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r56dw\" (UniqueName: \"kubernetes.io/projected/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-kube-api-access-r56dw\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.375454 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-utilities\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.375842 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-catalog-content\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.376313 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-utilities\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.399315 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r56dw\" (UniqueName: \"kubernetes.io/projected/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-kube-api-access-r56dw\") pod \"community-operators-7shf9\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:51 crc kubenswrapper[4816]: I0216 13:28:51.535288 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:28:52 crc kubenswrapper[4816]: I0216 13:28:52.056080 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7shf9"] Feb 16 13:28:52 crc kubenswrapper[4816]: I0216 13:28:52.143388 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7shf9" event={"ID":"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84","Type":"ContainerStarted","Data":"48ee7334672a32803997c1f42f3227c9ed7e697c344243a2bb558b446417e7fa"} Feb 16 13:28:53 crc kubenswrapper[4816]: I0216 13:28:53.180461 4816 generic.go:334] "Generic (PLEG): container finished" podID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerID="7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3" exitCode=0 Feb 16 13:28:53 crc kubenswrapper[4816]: I0216 13:28:53.180596 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7shf9" event={"ID":"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84","Type":"ContainerDied","Data":"7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3"} Feb 16 13:28:54 crc kubenswrapper[4816]: I0216 13:28:54.189075 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7shf9" event={"ID":"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84","Type":"ContainerStarted","Data":"1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed"} Feb 16 13:28:56 crc kubenswrapper[4816]: I0216 13:28:56.209883 4816 generic.go:334] "Generic (PLEG): container finished" podID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerID="1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed" exitCode=0 Feb 16 13:28:56 crc kubenswrapper[4816]: I0216 13:28:56.209996 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7shf9" event={"ID":"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84","Type":"ContainerDied","Data":"1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed"} Feb 16 13:28:57 crc kubenswrapper[4816]: I0216 13:28:57.220455 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7shf9" event={"ID":"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84","Type":"ContainerStarted","Data":"734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8"} Feb 16 13:28:57 crc kubenswrapper[4816]: I0216 13:28:57.248315 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7shf9" podStartSLOduration=2.81819027 podStartE2EDuration="6.248296891s" podCreationTimestamp="2026-02-16 13:28:51 +0000 UTC" firstStartedPulling="2026-02-16 13:28:53.182674134 +0000 UTC m=+1532.509387872" lastFinishedPulling="2026-02-16 13:28:56.612780725 +0000 UTC m=+1535.939494493" observedRunningTime="2026-02-16 13:28:57.246437501 +0000 UTC m=+1536.573151249" watchObservedRunningTime="2026-02-16 13:28:57.248296891 +0000 UTC m=+1536.575010629" Feb 16 13:29:01 crc kubenswrapper[4816]: I0216 13:29:01.536452 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:29:01 crc kubenswrapper[4816]: I0216 13:29:01.537315 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:29:01 crc kubenswrapper[4816]: I0216 13:29:01.575507 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:29:02 crc kubenswrapper[4816]: I0216 13:29:02.323811 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:29:02 crc kubenswrapper[4816]: I0216 13:29:02.376259 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7shf9"] Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.271943 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7shf9" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="registry-server" containerID="cri-o://734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8" gracePeriod=2 Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.666562 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.774503 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r56dw\" (UniqueName: \"kubernetes.io/projected/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-kube-api-access-r56dw\") pod \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.774579 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-utilities\") pod \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.774624 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-catalog-content\") pod \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\" (UID: \"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84\") " Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.775801 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-utilities" (OuterVolumeSpecName: "utilities") pod "fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" (UID: "fb1d630d-ac66-4a81-a1cf-a72d9b17ee84"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.780031 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-kube-api-access-r56dw" (OuterVolumeSpecName: "kube-api-access-r56dw") pod "fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" (UID: "fb1d630d-ac66-4a81-a1cf-a72d9b17ee84"). InnerVolumeSpecName "kube-api-access-r56dw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.856046 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" (UID: "fb1d630d-ac66-4a81-a1cf-a72d9b17ee84"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.875878 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r56dw\" (UniqueName: \"kubernetes.io/projected/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-kube-api-access-r56dw\") on node \"crc\" DevicePath \"\"" Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.875921 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:29:04 crc kubenswrapper[4816]: I0216 13:29:04.875934 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.282910 4816 generic.go:334] "Generic (PLEG): container finished" podID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerID="734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8" exitCode=0 Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.282970 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7shf9" event={"ID":"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84","Type":"ContainerDied","Data":"734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8"} Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.283011 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7shf9" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.283061 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7shf9" event={"ID":"fb1d630d-ac66-4a81-a1cf-a72d9b17ee84","Type":"ContainerDied","Data":"48ee7334672a32803997c1f42f3227c9ed7e697c344243a2bb558b446417e7fa"} Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.283136 4816 scope.go:117] "RemoveContainer" containerID="734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.305907 4816 scope.go:117] "RemoveContainer" containerID="1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.331994 4816 scope.go:117] "RemoveContainer" containerID="7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.341481 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7shf9"] Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.348161 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7shf9"] Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.362392 4816 scope.go:117] "RemoveContainer" containerID="734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8" Feb 16 13:29:05 crc kubenswrapper[4816]: E0216 13:29:05.362753 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8\": container with ID starting with 734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8 not found: ID does not exist" containerID="734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.362787 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8"} err="failed to get container status \"734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8\": rpc error: code = NotFound desc = could not find container \"734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8\": container with ID starting with 734d8914c2604e3adb286cd02a706fef2dc3308213c5e53216de89b8f94805b8 not found: ID does not exist" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.362806 4816 scope.go:117] "RemoveContainer" containerID="1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed" Feb 16 13:29:05 crc kubenswrapper[4816]: E0216 13:29:05.363033 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed\": container with ID starting with 1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed not found: ID does not exist" containerID="1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.363070 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed"} err="failed to get container status \"1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed\": rpc error: code = NotFound desc = could not find 
container \"1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed\": container with ID starting with 1655f0eb42eec7fa90a94f96ea5d5df6c1eae96c32f638df399e83a9b58e9bed not found: ID does not exist" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.363122 4816 scope.go:117] "RemoveContainer" containerID="7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3" Feb 16 13:29:05 crc kubenswrapper[4816]: E0216 13:29:05.363310 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3\": container with ID starting with 7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3 not found: ID does not exist" containerID="7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.363331 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3"} err="failed to get container status \"7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3\": rpc error: code = NotFound desc = could not find container \"7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3\": container with ID starting with 7f2b441401b12911a278a674ef900afd70371f9508e53bc871fa45937cf473b3 not found: ID does not exist" Feb 16 13:29:05 crc kubenswrapper[4816]: I0216 13:29:05.407697 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" path="/var/lib/kubelet/pods/fb1d630d-ac66-4a81-a1cf-a72d9b17ee84/volumes" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.217236 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6fd42"] Feb 16 13:29:07 crc kubenswrapper[4816]: E0216 13:29:07.217828 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="extract-utilities" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.217845 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="extract-utilities" Feb 16 13:29:07 crc kubenswrapper[4816]: E0216 13:29:07.217870 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="extract-content" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.217877 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="extract-content" Feb 16 13:29:07 crc kubenswrapper[4816]: E0216 13:29:07.217890 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="registry-server" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.217897 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="registry-server" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.218018 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb1d630d-ac66-4a81-a1cf-a72d9b17ee84" containerName="registry-server" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.219008 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.231435 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6fd42"] Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.312871 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-utilities\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.312941 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxfh6\" (UniqueName: \"kubernetes.io/projected/382e598e-85bd-455e-b0f9-fe31ecb1ee73-kube-api-access-mxfh6\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.312991 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-catalog-content\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.414313 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-utilities\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.414374 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxfh6\" (UniqueName: \"kubernetes.io/projected/382e598e-85bd-455e-b0f9-fe31ecb1ee73-kube-api-access-mxfh6\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.414421 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-catalog-content\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.414902 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-catalog-content\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.415217 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-utilities\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.437914 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mxfh6\" (UniqueName: \"kubernetes.io/projected/382e598e-85bd-455e-b0f9-fe31ecb1ee73-kube-api-access-mxfh6\") pod \"certified-operators-6fd42\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.540424 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:07 crc kubenswrapper[4816]: I0216 13:29:07.994096 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6fd42"] Feb 16 13:29:08 crc kubenswrapper[4816]: I0216 13:29:08.315568 4816 generic.go:334] "Generic (PLEG): container finished" podID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerID="0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef" exitCode=0 Feb 16 13:29:08 crc kubenswrapper[4816]: I0216 13:29:08.315690 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fd42" event={"ID":"382e598e-85bd-455e-b0f9-fe31ecb1ee73","Type":"ContainerDied","Data":"0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef"} Feb 16 13:29:08 crc kubenswrapper[4816]: I0216 13:29:08.316013 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fd42" event={"ID":"382e598e-85bd-455e-b0f9-fe31ecb1ee73","Type":"ContainerStarted","Data":"5895ef3b064270e07720a2e4eb0fd93bd2e93e761edf6a27bba018cf029f850d"} Feb 16 13:29:09 crc kubenswrapper[4816]: I0216 13:29:09.329294 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fd42" event={"ID":"382e598e-85bd-455e-b0f9-fe31ecb1ee73","Type":"ContainerStarted","Data":"a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae"} Feb 16 13:29:10 crc kubenswrapper[4816]: I0216 13:29:10.337999 4816 generic.go:334] "Generic (PLEG): container finished" podID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerID="a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae" exitCode=0 Feb 16 13:29:10 crc kubenswrapper[4816]: I0216 13:29:10.338716 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fd42" event={"ID":"382e598e-85bd-455e-b0f9-fe31ecb1ee73","Type":"ContainerDied","Data":"a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae"} Feb 16 13:29:10 crc kubenswrapper[4816]: I0216 13:29:10.338774 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fd42" event={"ID":"382e598e-85bd-455e-b0f9-fe31ecb1ee73","Type":"ContainerStarted","Data":"1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60"} Feb 16 13:29:10 crc kubenswrapper[4816]: I0216 13:29:10.362461 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6fd42" podStartSLOduration=1.9363806270000001 podStartE2EDuration="3.362444822s" podCreationTimestamp="2026-02-16 13:29:07 +0000 UTC" firstStartedPulling="2026-02-16 13:29:08.316756174 +0000 UTC m=+1547.643469892" lastFinishedPulling="2026-02-16 13:29:09.742820359 +0000 UTC m=+1549.069534087" observedRunningTime="2026-02-16 13:29:10.358077043 +0000 UTC m=+1549.684790791" watchObservedRunningTime="2026-02-16 13:29:10.362444822 +0000 UTC m=+1549.689158550" Feb 16 13:29:17 crc kubenswrapper[4816]: I0216 13:29:17.541181 4816 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:17 crc kubenswrapper[4816]: I0216 13:29:17.542623 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:17 crc kubenswrapper[4816]: I0216 13:29:17.593779 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:18 crc kubenswrapper[4816]: I0216 13:29:18.464091 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:18 crc kubenswrapper[4816]: I0216 13:29:18.509394 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6fd42"] Feb 16 13:29:20 crc kubenswrapper[4816]: I0216 13:29:20.432737 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6fd42" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="registry-server" containerID="cri-o://1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60" gracePeriod=2 Feb 16 13:29:20 crc kubenswrapper[4816]: I0216 13:29:20.974950 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.016329 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-utilities\") pod \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.016390 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxfh6\" (UniqueName: \"kubernetes.io/projected/382e598e-85bd-455e-b0f9-fe31ecb1ee73-kube-api-access-mxfh6\") pod \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.016443 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-catalog-content\") pod \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\" (UID: \"382e598e-85bd-455e-b0f9-fe31ecb1ee73\") " Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.017860 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-utilities" (OuterVolumeSpecName: "utilities") pod "382e598e-85bd-455e-b0f9-fe31ecb1ee73" (UID: "382e598e-85bd-455e-b0f9-fe31ecb1ee73"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.021812 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/382e598e-85bd-455e-b0f9-fe31ecb1ee73-kube-api-access-mxfh6" (OuterVolumeSpecName: "kube-api-access-mxfh6") pod "382e598e-85bd-455e-b0f9-fe31ecb1ee73" (UID: "382e598e-85bd-455e-b0f9-fe31ecb1ee73"). InnerVolumeSpecName "kube-api-access-mxfh6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.067910 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "382e598e-85bd-455e-b0f9-fe31ecb1ee73" (UID: "382e598e-85bd-455e-b0f9-fe31ecb1ee73"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.117704 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.117733 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/382e598e-85bd-455e-b0f9-fe31ecb1ee73-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.117742 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxfh6\" (UniqueName: \"kubernetes.io/projected/382e598e-85bd-455e-b0f9-fe31ecb1ee73-kube-api-access-mxfh6\") on node \"crc\" DevicePath \"\"" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.453321 4816 generic.go:334] "Generic (PLEG): container finished" podID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerID="1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60" exitCode=0 Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.453383 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fd42" event={"ID":"382e598e-85bd-455e-b0f9-fe31ecb1ee73","Type":"ContainerDied","Data":"1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60"} Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.453683 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fd42" event={"ID":"382e598e-85bd-455e-b0f9-fe31ecb1ee73","Type":"ContainerDied","Data":"5895ef3b064270e07720a2e4eb0fd93bd2e93e761edf6a27bba018cf029f850d"} Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.453517 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6fd42" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.453713 4816 scope.go:117] "RemoveContainer" containerID="1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.495982 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6fd42"] Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.496703 4816 scope.go:117] "RemoveContainer" containerID="a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.510374 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6fd42"] Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.517627 4816 scope.go:117] "RemoveContainer" containerID="0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.545624 4816 scope.go:117] "RemoveContainer" containerID="1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60" Feb 16 13:29:21 crc kubenswrapper[4816]: E0216 13:29:21.546106 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60\": container with ID starting with 1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60 not found: ID does not exist" containerID="1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.546278 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60"} err="failed to get container status \"1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60\": rpc error: code = NotFound desc = could not find container \"1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60\": container with ID starting with 1879c4a9115b3d2c63105ca1c00ef9ac6adea57d151f2a867a0bb87c9752ec60 not found: ID does not exist" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.546398 4816 scope.go:117] "RemoveContainer" containerID="a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae" Feb 16 13:29:21 crc kubenswrapper[4816]: E0216 13:29:21.546943 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae\": container with ID starting with a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae not found: ID does not exist" containerID="a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.546981 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae"} err="failed to get container status \"a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae\": rpc error: code = NotFound desc = could not find container \"a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae\": container with ID starting with a6c6891b0227c21a9f82246728992ca227e943e4cacd844f856349bf94f2ddae not found: ID does not exist" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.547006 4816 scope.go:117] "RemoveContainer" 
containerID="0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef" Feb 16 13:29:21 crc kubenswrapper[4816]: E0216 13:29:21.547242 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef\": container with ID starting with 0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef not found: ID does not exist" containerID="0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef" Feb 16 13:29:21 crc kubenswrapper[4816]: I0216 13:29:21.547260 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef"} err="failed to get container status \"0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef\": rpc error: code = NotFound desc = could not find container \"0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef\": container with ID starting with 0879fff7581693221a42f58f5ec500d47f56b66a14cfa69b1c954f090708bcef not found: ID does not exist" Feb 16 13:29:23 crc kubenswrapper[4816]: I0216 13:29:23.409830 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" path="/var/lib/kubelet/pods/382e598e-85bd-455e-b0f9-fe31ecb1ee73/volumes" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.222570 4816 scope.go:117] "RemoveContainer" containerID="c3ee070672541f2475c7a5b84908e9db321ca75bcdb3b2238999b588eeb68da3" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.245297 4816 scope.go:117] "RemoveContainer" containerID="b657086e02b8ce8e458e27dbbbf41ae11481a85b47a0269bc3b1dae982a47a7e" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.285839 4816 scope.go:117] "RemoveContainer" containerID="305b8eb6bcfac360528db193c73952f20605bc0004e0f5602cffb736efb9d9ec" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.305643 4816 scope.go:117] "RemoveContainer" containerID="cf8f50b8e01719c98fb9295479623e2775e7238b5f15f7ebf0bc469d7d0b9a1b" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.350134 4816 scope.go:117] "RemoveContainer" containerID="f8ae73c126c363833f99df33e58ee290003dea459fae306bda2b1c0a48b29dd4" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.366758 4816 scope.go:117] "RemoveContainer" containerID="3ec7bb1fa094c748d9f0a438072eea2ded5beaac207ad9e94a2baa9e58d3d69f" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.385692 4816 scope.go:117] "RemoveContainer" containerID="0d91b1adca95b091698777b9b7071ab6794bc04d84d64b9e208e219713418072" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.409956 4816 scope.go:117] "RemoveContainer" containerID="0b08dd3d0fdfaadbe3e28364a7f5534f73e7435d1603256b3b6c48882b298347" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.454820 4816 scope.go:117] "RemoveContainer" containerID="6521ecb5649f87cfc503189d690f32f55c580c29870839eb0b39951412f666a0" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.479790 4816 scope.go:117] "RemoveContainer" containerID="df32c655695662af3f4785beadfd5108fbf95a04faa2884f5decc908c0f8b4c1" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.503461 4816 scope.go:117] "RemoveContainer" containerID="ba7efc3ec9c5dfa321c00c780802598dd93174bcbdddbffdf6d571339bef8440" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.536624 4816 scope.go:117] "RemoveContainer" containerID="0a37364f47721e42e8d7d8c8e7e0b76b9f09f0c7e0a00afcf23bbc67bb3d615e" Feb 16 13:29:24 crc 
kubenswrapper[4816]: I0216 13:29:24.559290 4816 scope.go:117] "RemoveContainer" containerID="378117e2c3b9f9411f85b556eb61749b15f23c041753736e75e440bb7444ab8e" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.584610 4816 scope.go:117] "RemoveContainer" containerID="e5420356a58f2a742209fa93eeb74cfe936d1655956ca551c31cf64d9fc74339" Feb 16 13:29:24 crc kubenswrapper[4816]: I0216 13:29:24.622030 4816 scope.go:117] "RemoveContainer" containerID="ed64af8ac2faddc8f5b3609993e7e85b7c02038ee89682aa306fb9d136d0c815" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.138901 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55"] Feb 16 13:30:00 crc kubenswrapper[4816]: E0216 13:30:00.139755 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="registry-server" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.139780 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="registry-server" Feb 16 13:30:00 crc kubenswrapper[4816]: E0216 13:30:00.139803 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="extract-content" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.139812 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="extract-content" Feb 16 13:30:00 crc kubenswrapper[4816]: E0216 13:30:00.139828 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="extract-utilities" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.139837 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="extract-utilities" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.139996 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="382e598e-85bd-455e-b0f9-fe31ecb1ee73" containerName="registry-server" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.140541 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.145015 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.145087 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.148392 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55"] Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.179311 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/825e57ca-7b7b-4a34-8211-0ea0e222f836-secret-volume\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.179627 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hqj4\" (UniqueName: \"kubernetes.io/projected/825e57ca-7b7b-4a34-8211-0ea0e222f836-kube-api-access-4hqj4\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.179816 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/825e57ca-7b7b-4a34-8211-0ea0e222f836-config-volume\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.281864 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/825e57ca-7b7b-4a34-8211-0ea0e222f836-secret-volume\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.281922 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hqj4\" (UniqueName: \"kubernetes.io/projected/825e57ca-7b7b-4a34-8211-0ea0e222f836-kube-api-access-4hqj4\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.281994 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/825e57ca-7b7b-4a34-8211-0ea0e222f836-config-volume\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.283249 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/825e57ca-7b7b-4a34-8211-0ea0e222f836-config-volume\") pod 
\"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.289788 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/825e57ca-7b7b-4a34-8211-0ea0e222f836-secret-volume\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.303255 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hqj4\" (UniqueName: \"kubernetes.io/projected/825e57ca-7b7b-4a34-8211-0ea0e222f836-kube-api-access-4hqj4\") pod \"collect-profiles-29520810-xhd55\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.494840 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:00 crc kubenswrapper[4816]: I0216 13:30:00.934945 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55"] Feb 16 13:30:01 crc kubenswrapper[4816]: I0216 13:30:01.803808 4816 generic.go:334] "Generic (PLEG): container finished" podID="825e57ca-7b7b-4a34-8211-0ea0e222f836" containerID="7051296f9781ee33810fdf36264bc8ace4ae05d908028c2e3f50e035cf453c06" exitCode=0 Feb 16 13:30:01 crc kubenswrapper[4816]: I0216 13:30:01.804029 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" event={"ID":"825e57ca-7b7b-4a34-8211-0ea0e222f836","Type":"ContainerDied","Data":"7051296f9781ee33810fdf36264bc8ace4ae05d908028c2e3f50e035cf453c06"} Feb 16 13:30:01 crc kubenswrapper[4816]: I0216 13:30:01.804112 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" event={"ID":"825e57ca-7b7b-4a34-8211-0ea0e222f836","Type":"ContainerStarted","Data":"86500982d57b3154128a8d049f040ce109dafcfffe8718ce10bd0b099bb8b337"} Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.110422 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.121216 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/825e57ca-7b7b-4a34-8211-0ea0e222f836-secret-volume\") pod \"825e57ca-7b7b-4a34-8211-0ea0e222f836\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.121275 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/825e57ca-7b7b-4a34-8211-0ea0e222f836-config-volume\") pod \"825e57ca-7b7b-4a34-8211-0ea0e222f836\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.121340 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hqj4\" (UniqueName: \"kubernetes.io/projected/825e57ca-7b7b-4a34-8211-0ea0e222f836-kube-api-access-4hqj4\") pod \"825e57ca-7b7b-4a34-8211-0ea0e222f836\" (UID: \"825e57ca-7b7b-4a34-8211-0ea0e222f836\") " Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.122111 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/825e57ca-7b7b-4a34-8211-0ea0e222f836-config-volume" (OuterVolumeSpecName: "config-volume") pod "825e57ca-7b7b-4a34-8211-0ea0e222f836" (UID: "825e57ca-7b7b-4a34-8211-0ea0e222f836"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.127594 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/825e57ca-7b7b-4a34-8211-0ea0e222f836-kube-api-access-4hqj4" (OuterVolumeSpecName: "kube-api-access-4hqj4") pod "825e57ca-7b7b-4a34-8211-0ea0e222f836" (UID: "825e57ca-7b7b-4a34-8211-0ea0e222f836"). InnerVolumeSpecName "kube-api-access-4hqj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.129769 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/825e57ca-7b7b-4a34-8211-0ea0e222f836-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "825e57ca-7b7b-4a34-8211-0ea0e222f836" (UID: "825e57ca-7b7b-4a34-8211-0ea0e222f836"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.222254 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hqj4\" (UniqueName: \"kubernetes.io/projected/825e57ca-7b7b-4a34-8211-0ea0e222f836-kube-api-access-4hqj4\") on node \"crc\" DevicePath \"\"" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.222303 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/825e57ca-7b7b-4a34-8211-0ea0e222f836-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.222314 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/825e57ca-7b7b-4a34-8211-0ea0e222f836-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.821148 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" event={"ID":"825e57ca-7b7b-4a34-8211-0ea0e222f836","Type":"ContainerDied","Data":"86500982d57b3154128a8d049f040ce109dafcfffe8718ce10bd0b099bb8b337"} Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.821187 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86500982d57b3154128a8d049f040ce109dafcfffe8718ce10bd0b099bb8b337" Feb 16 13:30:03 crc kubenswrapper[4816]: I0216 13:30:03.821256 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55" Feb 16 13:30:24 crc kubenswrapper[4816]: I0216 13:30:24.784082 4816 scope.go:117] "RemoveContainer" containerID="b4a948c900f30d0434262ce037301027d88ee90b747aed9a2dc7d36fc8c7b454" Feb 16 13:30:24 crc kubenswrapper[4816]: I0216 13:30:24.824786 4816 scope.go:117] "RemoveContainer" containerID="8a476b5ff55d4104e20d7f1bdc31c6ef74cfa237527ee4844d863aa85dd3bdb1" Feb 16 13:30:24 crc kubenswrapper[4816]: I0216 13:30:24.859349 4816 scope.go:117] "RemoveContainer" containerID="be17341e9a947a7465a0a48dedaaad75abdb811c1f90696013b655b457845fe7" Feb 16 13:30:24 crc kubenswrapper[4816]: I0216 13:30:24.911590 4816 scope.go:117] "RemoveContainer" containerID="b6ca05adab300fd9ccf2880dc760585e09aaf0866632bf3a934d424a6c6e0afc" Feb 16 13:30:24 crc kubenswrapper[4816]: I0216 13:30:24.933537 4816 scope.go:117] "RemoveContainer" containerID="d4e585d8c04d34ad4f94e9d269ca13f9a08aa393622c9cc919a2c7459810db9a" Feb 16 13:30:24 crc kubenswrapper[4816]: I0216 13:30:24.963851 4816 scope.go:117] "RemoveContainer" containerID="b1b0de881416dfc3b1efa9b3cceea64ff96d9d1db02f7c7b5b1c9240e3757b44" Feb 16 13:30:24 crc kubenswrapper[4816]: I0216 13:30:24.996562 4816 scope.go:117] "RemoveContainer" containerID="027e08dafffb74262275075dc8c9d08656e18039ee81855c621d571ba9668edb" Feb 16 13:30:25 crc kubenswrapper[4816]: I0216 13:30:25.017144 4816 scope.go:117] "RemoveContainer" containerID="b594b400e21605362a39b0644bd2c43537ea857aedc6e60fe673ee3964203cf8" Feb 16 13:30:25 crc kubenswrapper[4816]: I0216 13:30:25.039834 4816 scope.go:117] "RemoveContainer" containerID="56e385b20d8b4b9b80ca388d17160eb68951aa4e48f004e0a892e63e727fe0b0" Feb 16 13:30:25 crc kubenswrapper[4816]: I0216 13:30:25.082563 4816 scope.go:117] "RemoveContainer" containerID="96a26d4a869ea29f2d4c6578c22ea996c7dcf18bdf02530d972d1e05dfd28823" Feb 16 13:30:25 crc kubenswrapper[4816]: I0216 13:30:25.102978 4816 scope.go:117] "RemoveContainer" 
containerID="a8e282eef394bdb8b3559f783af4f640c6a7bb4f9d7e1a7ac0a8e8e3c4b0bafb" Feb 16 13:30:25 crc kubenswrapper[4816]: I0216 13:30:25.131191 4816 scope.go:117] "RemoveContainer" containerID="24d21dd10427dd9ed85ee969b127ac599b8e7432a15c1f720ab6ea7f8f1c0d2b" Feb 16 13:30:25 crc kubenswrapper[4816]: I0216 13:30:25.150932 4816 scope.go:117] "RemoveContainer" containerID="474929eac836803381bab25d53cae980ee7dbac3ad8d6c4038f390f093bca57e" Feb 16 13:30:25 crc kubenswrapper[4816]: I0216 13:30:25.167516 4816 scope.go:117] "RemoveContainer" containerID="e1555ec2ef7c331226bf5f3c9dd304c9a719ac9687ba3e0531c29bd7c838c76b" Feb 16 13:30:36 crc kubenswrapper[4816]: I0216 13:30:36.941169 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:30:36 crc kubenswrapper[4816]: I0216 13:30:36.941773 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:31:06 crc kubenswrapper[4816]: I0216 13:31:06.940636 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:31:06 crc kubenswrapper[4816]: I0216 13:31:06.941387 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.339539 4816 scope.go:117] "RemoveContainer" containerID="76ec843c703c6b17fc54715a57c64cbeba5f4bb5c4680e10148f367c247fc0b6" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.371008 4816 scope.go:117] "RemoveContainer" containerID="eaecabde41b4e021829e9c54ac76b1d41288afa9788f12cfc4efc87303bf69b9" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.396867 4816 scope.go:117] "RemoveContainer" containerID="2421e0c16ddc2a0653671ff82c882fdcc48898092dd506013711b5886e1eb877" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.549455 4816 scope.go:117] "RemoveContainer" containerID="4bb52d950b8046a9e69a5960c76c6af1619b06ddc7d4e281fdf48cfbcb9eceb0" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.565292 4816 scope.go:117] "RemoveContainer" containerID="5c0de90e8dcfab5d056886954e128878aef7cca94e4c5037f744e9127ed2ae39" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.579384 4816 scope.go:117] "RemoveContainer" containerID="ffcda6952756ad6d6adbadba9e616f946581ed750ee58705820ad16489371010" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.615566 4816 scope.go:117] "RemoveContainer" containerID="8d9d8628d67d6b4cddd6e243d4b090aa9289ab58d9a0db8e8d50f4d420889b21" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.635007 4816 scope.go:117] "RemoveContainer" containerID="d9fc6c343abd74ee8d6a07cccee8b0dfa8b3d649e3953364120a36b4b19f99b6" Feb 16 
13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.660012 4816 scope.go:117] "RemoveContainer" containerID="527ce40525cb7c9b030d6afba0202147d041220b639540ecbe06dabb3e1425e2" Feb 16 13:31:25 crc kubenswrapper[4816]: I0216 13:31:25.682374 4816 scope.go:117] "RemoveContainer" containerID="de70394506e788b0a9d01206ee022d4664882aace80a609a88569836fca38d8d" Feb 16 13:31:36 crc kubenswrapper[4816]: I0216 13:31:36.940918 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:31:36 crc kubenswrapper[4816]: I0216 13:31:36.941721 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:31:36 crc kubenswrapper[4816]: I0216 13:31:36.941812 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:31:36 crc kubenswrapper[4816]: I0216 13:31:36.942916 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:31:36 crc kubenswrapper[4816]: I0216 13:31:36.943255 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" gracePeriod=600 Feb 16 13:31:37 crc kubenswrapper[4816]: E0216 13:31:37.069367 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:31:37 crc kubenswrapper[4816]: I0216 13:31:37.661850 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" exitCode=0 Feb 16 13:31:37 crc kubenswrapper[4816]: I0216 13:31:37.661920 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2"} Feb 16 13:31:37 crc kubenswrapper[4816]: I0216 13:31:37.661979 4816 scope.go:117] "RemoveContainer" containerID="95aa8a6a6708ed2afc1dbaedfa775efb1b58e3f80a0695507163b402563b9cf2" Feb 16 13:31:37 crc kubenswrapper[4816]: I0216 13:31:37.662744 4816 scope.go:117] "RemoveContainer" 
containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:31:37 crc kubenswrapper[4816]: E0216 13:31:37.663285 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:31:53 crc kubenswrapper[4816]: I0216 13:31:53.398767 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:31:53 crc kubenswrapper[4816]: E0216 13:31:53.399786 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:32:06 crc kubenswrapper[4816]: I0216 13:32:06.401911 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:32:06 crc kubenswrapper[4816]: E0216 13:32:06.402999 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:32:17 crc kubenswrapper[4816]: I0216 13:32:17.398495 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:32:17 crc kubenswrapper[4816]: E0216 13:32:17.399116 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:32:25 crc kubenswrapper[4816]: I0216 13:32:25.836694 4816 scope.go:117] "RemoveContainer" containerID="b8de906ff2b39c6986fa45580797f26d39d0277c32281ccb7e9fc35db0d673c3" Feb 16 13:32:28 crc kubenswrapper[4816]: I0216 13:32:28.398944 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:32:28 crc kubenswrapper[4816]: E0216 13:32:28.399328 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:32:41 crc kubenswrapper[4816]: I0216 13:32:41.407537 4816 scope.go:117] "RemoveContainer" 
containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:32:41 crc kubenswrapper[4816]: E0216 13:32:41.408610 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:32:53 crc kubenswrapper[4816]: I0216 13:32:53.398680 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:32:53 crc kubenswrapper[4816]: E0216 13:32:53.399307 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:33:07 crc kubenswrapper[4816]: I0216 13:33:07.398688 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:33:07 crc kubenswrapper[4816]: E0216 13:33:07.399414 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:33:20 crc kubenswrapper[4816]: I0216 13:33:20.398490 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:33:20 crc kubenswrapper[4816]: E0216 13:33:20.399202 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:33:34 crc kubenswrapper[4816]: I0216 13:33:34.398712 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:33:34 crc kubenswrapper[4816]: E0216 13:33:34.399420 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:33:47 crc kubenswrapper[4816]: I0216 13:33:47.399024 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:33:47 crc kubenswrapper[4816]: E0216 13:33:47.399696 4816 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:34:01 crc kubenswrapper[4816]: I0216 13:34:01.402579 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:34:01 crc kubenswrapper[4816]: E0216 13:34:01.403409 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:34:16 crc kubenswrapper[4816]: I0216 13:34:16.398447 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:34:16 crc kubenswrapper[4816]: E0216 13:34:16.399249 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:34:30 crc kubenswrapper[4816]: I0216 13:34:30.399049 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:34:30 crc kubenswrapper[4816]: E0216 13:34:30.399942 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:34:43 crc kubenswrapper[4816]: I0216 13:34:43.398248 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:34:43 crc kubenswrapper[4816]: E0216 13:34:43.399010 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:34:57 crc kubenswrapper[4816]: I0216 13:34:57.398574 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:34:57 crc kubenswrapper[4816]: E0216 13:34:57.401131 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:35:12 crc kubenswrapper[4816]: I0216 13:35:12.398533 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:35:12 crc kubenswrapper[4816]: E0216 13:35:12.399476 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:35:25 crc kubenswrapper[4816]: I0216 13:35:25.399207 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:35:25 crc kubenswrapper[4816]: E0216 13:35:25.399994 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:35:36 crc kubenswrapper[4816]: I0216 13:35:36.399977 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:35:36 crc kubenswrapper[4816]: E0216 13:35:36.401283 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:35:47 crc kubenswrapper[4816]: I0216 13:35:47.398620 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:35:47 crc kubenswrapper[4816]: E0216 13:35:47.399583 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:36:00 crc kubenswrapper[4816]: I0216 13:36:00.398810 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:36:00 crc kubenswrapper[4816]: E0216 13:36:00.399504 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:36:12 crc kubenswrapper[4816]: I0216 13:36:12.398650 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:36:12 crc kubenswrapper[4816]: E0216 13:36:12.399507 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:36:25 crc kubenswrapper[4816]: I0216 13:36:25.930335 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nw4lp"] Feb 16 13:36:25 crc kubenswrapper[4816]: E0216 13:36:25.931353 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="825e57ca-7b7b-4a34-8211-0ea0e222f836" containerName="collect-profiles" Feb 16 13:36:25 crc kubenswrapper[4816]: I0216 13:36:25.931368 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="825e57ca-7b7b-4a34-8211-0ea0e222f836" containerName="collect-profiles" Feb 16 13:36:25 crc kubenswrapper[4816]: I0216 13:36:25.931542 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="825e57ca-7b7b-4a34-8211-0ea0e222f836" containerName="collect-profiles" Feb 16 13:36:25 crc kubenswrapper[4816]: I0216 13:36:25.932740 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:25 crc kubenswrapper[4816]: I0216 13:36:25.955333 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw4lp"] Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.071173 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-catalog-content\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.071472 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-utilities\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.071576 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s8nf\" (UniqueName: \"kubernetes.io/projected/b8015d3a-d625-496c-b32c-45545725b1c6-kube-api-access-6s8nf\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.172698 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-catalog-content\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.172739 
4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-utilities\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.172762 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s8nf\" (UniqueName: \"kubernetes.io/projected/b8015d3a-d625-496c-b32c-45545725b1c6-kube-api-access-6s8nf\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.173254 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-catalog-content\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.173632 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-utilities\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.196462 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s8nf\" (UniqueName: \"kubernetes.io/projected/b8015d3a-d625-496c-b32c-45545725b1c6-kube-api-access-6s8nf\") pod \"redhat-marketplace-nw4lp\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.252129 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:26 crc kubenswrapper[4816]: I0216 13:36:26.735600 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw4lp"] Feb 16 13:36:26 crc kubenswrapper[4816]: W0216 13:36:26.748065 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8015d3a_d625_496c_b32c_45545725b1c6.slice/crio-1b5aa53e0b039d5fdb89ae6d57d69068886b26e188d49b92e089d25f00a4f8fe WatchSource:0}: Error finding container 1b5aa53e0b039d5fdb89ae6d57d69068886b26e188d49b92e089d25f00a4f8fe: Status 404 returned error can't find the container with id 1b5aa53e0b039d5fdb89ae6d57d69068886b26e188d49b92e089d25f00a4f8fe Feb 16 13:36:27 crc kubenswrapper[4816]: I0216 13:36:27.101139 4816 generic.go:334] "Generic (PLEG): container finished" podID="b8015d3a-d625-496c-b32c-45545725b1c6" containerID="64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e" exitCode=0 Feb 16 13:36:27 crc kubenswrapper[4816]: I0216 13:36:27.101193 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw4lp" event={"ID":"b8015d3a-d625-496c-b32c-45545725b1c6","Type":"ContainerDied","Data":"64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e"} Feb 16 13:36:27 crc kubenswrapper[4816]: I0216 13:36:27.101222 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw4lp" event={"ID":"b8015d3a-d625-496c-b32c-45545725b1c6","Type":"ContainerStarted","Data":"1b5aa53e0b039d5fdb89ae6d57d69068886b26e188d49b92e089d25f00a4f8fe"} Feb 16 13:36:27 crc kubenswrapper[4816]: I0216 13:36:27.103123 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 13:36:27 crc kubenswrapper[4816]: I0216 13:36:27.398933 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:36:27 crc kubenswrapper[4816]: E0216 13:36:27.400264 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:36:28 crc kubenswrapper[4816]: I0216 13:36:28.112016 4816 generic.go:334] "Generic (PLEG): container finished" podID="b8015d3a-d625-496c-b32c-45545725b1c6" containerID="5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978" exitCode=0 Feb 16 13:36:28 crc kubenswrapper[4816]: I0216 13:36:28.112073 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw4lp" event={"ID":"b8015d3a-d625-496c-b32c-45545725b1c6","Type":"ContainerDied","Data":"5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978"} Feb 16 13:36:29 crc kubenswrapper[4816]: I0216 13:36:29.142729 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw4lp" event={"ID":"b8015d3a-d625-496c-b32c-45545725b1c6","Type":"ContainerStarted","Data":"a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137"} Feb 16 13:36:29 crc kubenswrapper[4816]: I0216 13:36:29.165731 4816 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-marketplace/redhat-marketplace-nw4lp" podStartSLOduration=2.734145584 podStartE2EDuration="4.165641489s" podCreationTimestamp="2026-02-16 13:36:25 +0000 UTC" firstStartedPulling="2026-02-16 13:36:27.102787956 +0000 UTC m=+1986.429501684" lastFinishedPulling="2026-02-16 13:36:28.534283841 +0000 UTC m=+1987.860997589" observedRunningTime="2026-02-16 13:36:29.160801837 +0000 UTC m=+1988.487515565" watchObservedRunningTime="2026-02-16 13:36:29.165641489 +0000 UTC m=+1988.492355217" Feb 16 13:36:36 crc kubenswrapper[4816]: I0216 13:36:36.252333 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:36 crc kubenswrapper[4816]: I0216 13:36:36.252939 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:36 crc kubenswrapper[4816]: I0216 13:36:36.320553 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:37 crc kubenswrapper[4816]: I0216 13:36:37.263445 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:37 crc kubenswrapper[4816]: I0216 13:36:37.334358 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw4lp"] Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.224382 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nw4lp" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="registry-server" containerID="cri-o://a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137" gracePeriod=2 Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.399981 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.630919 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.680309 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-utilities\") pod \"b8015d3a-d625-496c-b32c-45545725b1c6\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.680387 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s8nf\" (UniqueName: \"kubernetes.io/projected/b8015d3a-d625-496c-b32c-45545725b1c6-kube-api-access-6s8nf\") pod \"b8015d3a-d625-496c-b32c-45545725b1c6\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.680420 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-catalog-content\") pod \"b8015d3a-d625-496c-b32c-45545725b1c6\" (UID: \"b8015d3a-d625-496c-b32c-45545725b1c6\") " Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.681434 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-utilities" (OuterVolumeSpecName: "utilities") pod "b8015d3a-d625-496c-b32c-45545725b1c6" (UID: "b8015d3a-d625-496c-b32c-45545725b1c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.685505 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8015d3a-d625-496c-b32c-45545725b1c6-kube-api-access-6s8nf" (OuterVolumeSpecName: "kube-api-access-6s8nf") pod "b8015d3a-d625-496c-b32c-45545725b1c6" (UID: "b8015d3a-d625-496c-b32c-45545725b1c6"). InnerVolumeSpecName "kube-api-access-6s8nf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.705351 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8015d3a-d625-496c-b32c-45545725b1c6" (UID: "b8015d3a-d625-496c-b32c-45545725b1c6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.781960 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.781992 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s8nf\" (UniqueName: \"kubernetes.io/projected/b8015d3a-d625-496c-b32c-45545725b1c6-kube-api-access-6s8nf\") on node \"crc\" DevicePath \"\"" Feb 16 13:36:39 crc kubenswrapper[4816]: I0216 13:36:39.782005 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8015d3a-d625-496c-b32c-45545725b1c6-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.232959 4816 generic.go:334] "Generic (PLEG): container finished" podID="b8015d3a-d625-496c-b32c-45545725b1c6" containerID="a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137" exitCode=0 Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.233383 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nw4lp" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.233686 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw4lp" event={"ID":"b8015d3a-d625-496c-b32c-45545725b1c6","Type":"ContainerDied","Data":"a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137"} Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.233731 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nw4lp" event={"ID":"b8015d3a-d625-496c-b32c-45545725b1c6","Type":"ContainerDied","Data":"1b5aa53e0b039d5fdb89ae6d57d69068886b26e188d49b92e089d25f00a4f8fe"} Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.233751 4816 scope.go:117] "RemoveContainer" containerID="a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.242198 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"70d0b74ffdd0e7a7f6eb6db66547673703429a926b5be987dc4f94f5c3f7d9a9"} Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.272738 4816 scope.go:117] "RemoveContainer" containerID="5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.294172 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw4lp"] Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.302843 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nw4lp"] Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.317642 4816 scope.go:117] "RemoveContainer" containerID="64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.334566 4816 scope.go:117] "RemoveContainer" containerID="a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137" Feb 16 13:36:40 crc kubenswrapper[4816]: E0216 13:36:40.335004 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137\": container with ID starting with a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137 not found: ID does not exist" containerID="a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.335097 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137"} err="failed to get container status \"a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137\": rpc error: code = NotFound desc = could not find container \"a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137\": container with ID starting with a350308cd64db8e9172bb7aea093ce71a026d16ece87bd1289b38db2b2bdc137 not found: ID does not exist" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.335173 4816 scope.go:117] "RemoveContainer" containerID="5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978" Feb 16 13:36:40 crc kubenswrapper[4816]: E0216 13:36:40.335497 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978\": container with ID starting with 5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978 not found: ID does not exist" containerID="5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.335578 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978"} err="failed to get container status \"5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978\": rpc error: code = NotFound desc = could not find container \"5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978\": container with ID starting with 5e0060c069b7804a5b17489128a7d34cd1f7df60cb58ec2fe23b04a7aa372978 not found: ID does not exist" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.335678 4816 scope.go:117] "RemoveContainer" containerID="64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e" Feb 16 13:36:40 crc kubenswrapper[4816]: E0216 13:36:40.336062 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e\": container with ID starting with 64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e not found: ID does not exist" containerID="64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e" Feb 16 13:36:40 crc kubenswrapper[4816]: I0216 13:36:40.336104 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e"} err="failed to get container status \"64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e\": rpc error: code = NotFound desc = could not find container \"64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e\": container with ID starting with 64c3fc00fa5853d4aa5af6166d7f316ab52340c593595cd52596fd7013dee59e not found: ID does not exist" Feb 16 13:36:41 crc kubenswrapper[4816]: I0216 13:36:41.406952 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" 
path="/var/lib/kubelet/pods/b8015d3a-d625-496c-b32c-45545725b1c6/volumes" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.002123 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ph8ch"] Feb 16 13:37:06 crc kubenswrapper[4816]: E0216 13:37:06.003571 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="registry-server" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.003619 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="registry-server" Feb 16 13:37:06 crc kubenswrapper[4816]: E0216 13:37:06.003703 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="extract-content" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.003717 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="extract-content" Feb 16 13:37:06 crc kubenswrapper[4816]: E0216 13:37:06.003748 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="extract-utilities" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.003761 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="extract-utilities" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.004036 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8015d3a-d625-496c-b32c-45545725b1c6" containerName="registry-server" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.006271 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.028091 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ph8ch"] Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.107305 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-utilities\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.107813 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6m5j\" (UniqueName: \"kubernetes.io/projected/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-kube-api-access-x6m5j\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.107875 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-catalog-content\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.211097 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-catalog-content\") pod \"redhat-operators-ph8ch\" (UID: 
\"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.211242 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-utilities\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.211360 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6m5j\" (UniqueName: \"kubernetes.io/projected/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-kube-api-access-x6m5j\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.212526 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-catalog-content\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.213142 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-utilities\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.242578 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6m5j\" (UniqueName: \"kubernetes.io/projected/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-kube-api-access-x6m5j\") pod \"redhat-operators-ph8ch\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.353296 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:06 crc kubenswrapper[4816]: I0216 13:37:06.828043 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ph8ch"] Feb 16 13:37:07 crc kubenswrapper[4816]: I0216 13:37:07.471510 4816 generic.go:334] "Generic (PLEG): container finished" podID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerID="57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81" exitCode=0 Feb 16 13:37:07 crc kubenswrapper[4816]: I0216 13:37:07.471593 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph8ch" event={"ID":"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c","Type":"ContainerDied","Data":"57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81"} Feb 16 13:37:07 crc kubenswrapper[4816]: I0216 13:37:07.471693 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph8ch" event={"ID":"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c","Type":"ContainerStarted","Data":"8d1bc63901637f3606375152eb7f87e2237fcfc736496dd48e5579d3526ac3de"} Feb 16 13:37:08 crc kubenswrapper[4816]: I0216 13:37:08.479421 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph8ch" event={"ID":"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c","Type":"ContainerStarted","Data":"8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5"} Feb 16 13:37:09 crc kubenswrapper[4816]: I0216 13:37:09.488632 4816 generic.go:334] "Generic (PLEG): container finished" podID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerID="8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5" exitCode=0 Feb 16 13:37:09 crc kubenswrapper[4816]: I0216 13:37:09.488737 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph8ch" event={"ID":"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c","Type":"ContainerDied","Data":"8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5"} Feb 16 13:37:10 crc kubenswrapper[4816]: I0216 13:37:10.499621 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph8ch" event={"ID":"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c","Type":"ContainerStarted","Data":"092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9"} Feb 16 13:37:10 crc kubenswrapper[4816]: I0216 13:37:10.546250 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ph8ch" podStartSLOduration=3.148809761 podStartE2EDuration="5.546226212s" podCreationTimestamp="2026-02-16 13:37:05 +0000 UTC" firstStartedPulling="2026-02-16 13:37:07.474823983 +0000 UTC m=+2026.801537711" lastFinishedPulling="2026-02-16 13:37:09.872240414 +0000 UTC m=+2029.198954162" observedRunningTime="2026-02-16 13:37:10.538741328 +0000 UTC m=+2029.865455066" watchObservedRunningTime="2026-02-16 13:37:10.546226212 +0000 UTC m=+2029.872939940" Feb 16 13:37:16 crc kubenswrapper[4816]: I0216 13:37:16.353927 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:16 crc kubenswrapper[4816]: I0216 13:37:16.354445 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:17 crc kubenswrapper[4816]: I0216 13:37:17.431609 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ph8ch" 
podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="registry-server" probeResult="failure" output=< Feb 16 13:37:17 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 13:37:17 crc kubenswrapper[4816]: > Feb 16 13:37:26 crc kubenswrapper[4816]: I0216 13:37:26.394783 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:26 crc kubenswrapper[4816]: I0216 13:37:26.446387 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:26 crc kubenswrapper[4816]: I0216 13:37:26.641957 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ph8ch"] Feb 16 13:37:27 crc kubenswrapper[4816]: I0216 13:37:27.629493 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ph8ch" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="registry-server" containerID="cri-o://092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9" gracePeriod=2 Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.021331 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.133098 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6m5j\" (UniqueName: \"kubernetes.io/projected/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-kube-api-access-x6m5j\") pod \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.133143 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-catalog-content\") pod \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.133183 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-utilities\") pod \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\" (UID: \"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c\") " Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.134929 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-utilities" (OuterVolumeSpecName: "utilities") pod "0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" (UID: "0b386cd9-f8c0-45e9-b3ee-3b7acb77746c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.138913 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-kube-api-access-x6m5j" (OuterVolumeSpecName: "kube-api-access-x6m5j") pod "0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" (UID: "0b386cd9-f8c0-45e9-b3ee-3b7acb77746c"). InnerVolumeSpecName "kube-api-access-x6m5j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.235973 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6m5j\" (UniqueName: \"kubernetes.io/projected/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-kube-api-access-x6m5j\") on node \"crc\" DevicePath \"\"" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.236075 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.276313 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" (UID: "0b386cd9-f8c0-45e9-b3ee-3b7acb77746c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.336858 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.638400 4816 generic.go:334] "Generic (PLEG): container finished" podID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerID="092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9" exitCode=0 Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.638437 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph8ch" event={"ID":"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c","Type":"ContainerDied","Data":"092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9"} Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.638463 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph8ch" event={"ID":"0b386cd9-f8c0-45e9-b3ee-3b7acb77746c","Type":"ContainerDied","Data":"8d1bc63901637f3606375152eb7f87e2237fcfc736496dd48e5579d3526ac3de"} Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.638480 4816 scope.go:117] "RemoveContainer" containerID="092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.638595 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ph8ch" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.670812 4816 scope.go:117] "RemoveContainer" containerID="8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.691674 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ph8ch"] Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.702537 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ph8ch"] Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.719060 4816 scope.go:117] "RemoveContainer" containerID="57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.751957 4816 scope.go:117] "RemoveContainer" containerID="092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9" Feb 16 13:37:28 crc kubenswrapper[4816]: E0216 13:37:28.752422 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9\": container with ID starting with 092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9 not found: ID does not exist" containerID="092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.752462 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9"} err="failed to get container status \"092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9\": rpc error: code = NotFound desc = could not find container \"092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9\": container with ID starting with 092234368d97486d21605e38d8418fda9af2b215665ddb9a5d6b496c468eaea9 not found: ID does not exist" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.752484 4816 scope.go:117] "RemoveContainer" containerID="8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5" Feb 16 13:37:28 crc kubenswrapper[4816]: E0216 13:37:28.753031 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5\": container with ID starting with 8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5 not found: ID does not exist" containerID="8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.753055 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5"} err="failed to get container status \"8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5\": rpc error: code = NotFound desc = could not find container \"8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5\": container with ID starting with 8e37ce273ad384139d402f9adb73ade7c24ec56ae47e323293060d07e3314ab5 not found: ID does not exist" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.753073 4816 scope.go:117] "RemoveContainer" containerID="57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81" Feb 16 13:37:28 crc kubenswrapper[4816]: E0216 13:37:28.753417 4816 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81\": container with ID starting with 57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81 not found: ID does not exist" containerID="57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81" Feb 16 13:37:28 crc kubenswrapper[4816]: I0216 13:37:28.753440 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81"} err="failed to get container status \"57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81\": rpc error: code = NotFound desc = could not find container \"57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81\": container with ID starting with 57e0bc1dc7aed9b17223a1005a9bb51f9a4a7a183b9da0706dadd9ae6e0a8b81 not found: ID does not exist" Feb 16 13:37:29 crc kubenswrapper[4816]: I0216 13:37:29.407044 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" path="/var/lib/kubelet/pods/0b386cd9-f8c0-45e9-b3ee-3b7acb77746c/volumes" Feb 16 13:39:06 crc kubenswrapper[4816]: I0216 13:39:06.940788 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:39:06 crc kubenswrapper[4816]: I0216 13:39:06.941293 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:39:36 crc kubenswrapper[4816]: I0216 13:39:36.941318 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:39:36 crc kubenswrapper[4816]: I0216 13:39:36.942014 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:40:06 crc kubenswrapper[4816]: I0216 13:40:06.940596 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:40:06 crc kubenswrapper[4816]: I0216 13:40:06.941109 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:40:06 crc kubenswrapper[4816]: I0216 13:40:06.941147 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:40:06 crc kubenswrapper[4816]: I0216 13:40:06.941784 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"70d0b74ffdd0e7a7f6eb6db66547673703429a926b5be987dc4f94f5c3f7d9a9"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:40:06 crc kubenswrapper[4816]: I0216 13:40:06.941838 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://70d0b74ffdd0e7a7f6eb6db66547673703429a926b5be987dc4f94f5c3f7d9a9" gracePeriod=600 Feb 16 13:40:07 crc kubenswrapper[4816]: I0216 13:40:07.923587 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="70d0b74ffdd0e7a7f6eb6db66547673703429a926b5be987dc4f94f5c3f7d9a9" exitCode=0 Feb 16 13:40:07 crc kubenswrapper[4816]: I0216 13:40:07.923666 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"70d0b74ffdd0e7a7f6eb6db66547673703429a926b5be987dc4f94f5c3f7d9a9"} Feb 16 13:40:07 crc kubenswrapper[4816]: I0216 13:40:07.923941 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"} Feb 16 13:40:07 crc kubenswrapper[4816]: I0216 13:40:07.923961 4816 scope.go:117] "RemoveContainer" containerID="4da3acd7398775da0f00e1593e10bbfa4e0a3bcebad9e39fb91dd0e73e0f0aa2" Feb 16 13:42:36 crc kubenswrapper[4816]: I0216 13:42:36.940895 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:42:36 crc kubenswrapper[4816]: I0216 13:42:36.941350 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:43:06 crc kubenswrapper[4816]: I0216 13:43:06.941150 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:43:06 crc kubenswrapper[4816]: I0216 13:43:06.941774 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:43:36 crc 
kubenswrapper[4816]: I0216 13:43:36.940857 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:43:36 crc kubenswrapper[4816]: I0216 13:43:36.941404 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:43:36 crc kubenswrapper[4816]: I0216 13:43:36.941439 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:43:36 crc kubenswrapper[4816]: I0216 13:43:36.941941 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:43:36 crc kubenswrapper[4816]: I0216 13:43:36.941991 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" gracePeriod=600 Feb 16 13:43:37 crc kubenswrapper[4816]: E0216 13:43:37.063847 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:43:37 crc kubenswrapper[4816]: I0216 13:43:37.857136 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" exitCode=0 Feb 16 13:43:37 crc kubenswrapper[4816]: I0216 13:43:37.857185 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"} Feb 16 13:43:37 crc kubenswrapper[4816]: I0216 13:43:37.857470 4816 scope.go:117] "RemoveContainer" containerID="70d0b74ffdd0e7a7f6eb6db66547673703429a926b5be987dc4f94f5c3f7d9a9" Feb 16 13:43:37 crc kubenswrapper[4816]: I0216 13:43:37.858346 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:43:37 crc kubenswrapper[4816]: E0216 13:43:37.858808 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.609899 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8bb92"] Feb 16 13:43:44 crc kubenswrapper[4816]: E0216 13:43:44.610832 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="extract-utilities" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.610853 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="extract-utilities" Feb 16 13:43:44 crc kubenswrapper[4816]: E0216 13:43:44.610886 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="extract-content" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.610897 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="extract-content" Feb 16 13:43:44 crc kubenswrapper[4816]: E0216 13:43:44.610910 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="registry-server" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.610921 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="registry-server" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.611102 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b386cd9-f8c0-45e9-b3ee-3b7acb77746c" containerName="registry-server" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.612357 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.627750 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8bb92"] Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.732125 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-utilities\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.732286 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp4nb\" (UniqueName: \"kubernetes.io/projected/1690ac3d-29aa-4efe-a378-5d19ff514b9a-kube-api-access-pp4nb\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.732356 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-catalog-content\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.833245 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp4nb\" (UniqueName: \"kubernetes.io/projected/1690ac3d-29aa-4efe-a378-5d19ff514b9a-kube-api-access-pp4nb\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.833298 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-catalog-content\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.833367 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-utilities\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.834018 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-catalog-content\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.834190 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-utilities\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.859424 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pp4nb\" (UniqueName: \"kubernetes.io/projected/1690ac3d-29aa-4efe-a378-5d19ff514b9a-kube-api-access-pp4nb\") pod \"community-operators-8bb92\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:44 crc kubenswrapper[4816]: I0216 13:43:44.935268 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:45 crc kubenswrapper[4816]: I0216 13:43:45.212490 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8bb92"] Feb 16 13:43:45 crc kubenswrapper[4816]: I0216 13:43:45.928811 4816 generic.go:334] "Generic (PLEG): container finished" podID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerID="51e31e2d48ad63d51b30cee41aefe71f8edb0e94b3bab239e9ad1b8b08e28349" exitCode=0 Feb 16 13:43:45 crc kubenswrapper[4816]: I0216 13:43:45.928917 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bb92" event={"ID":"1690ac3d-29aa-4efe-a378-5d19ff514b9a","Type":"ContainerDied","Data":"51e31e2d48ad63d51b30cee41aefe71f8edb0e94b3bab239e9ad1b8b08e28349"} Feb 16 13:43:45 crc kubenswrapper[4816]: I0216 13:43:45.929413 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bb92" event={"ID":"1690ac3d-29aa-4efe-a378-5d19ff514b9a","Type":"ContainerStarted","Data":"95d9c12fa2d23b5d7eb42b31740e0a7ae6e68d0a45f6805c50c6036f1055adaf"} Feb 16 13:43:45 crc kubenswrapper[4816]: I0216 13:43:45.932115 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 13:43:46 crc kubenswrapper[4816]: I0216 13:43:46.810529 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2thpr"] Feb 16 13:43:46 crc kubenswrapper[4816]: I0216 13:43:46.812840 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:46 crc kubenswrapper[4816]: I0216 13:43:46.841512 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2thpr"] Feb 16 13:43:46 crc kubenswrapper[4816]: I0216 13:43:46.937760 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bb92" event={"ID":"1690ac3d-29aa-4efe-a378-5d19ff514b9a","Type":"ContainerStarted","Data":"f813a3e252ab40f28b0c182d06429e0cdc31d6f6ea9bb0e320e44d8415365167"} Feb 16 13:43:46 crc kubenswrapper[4816]: I0216 13:43:46.970804 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-catalog-content\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:46 crc kubenswrapper[4816]: I0216 13:43:46.970941 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt9ls\" (UniqueName: \"kubernetes.io/projected/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-kube-api-access-lt9ls\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:46 crc kubenswrapper[4816]: I0216 13:43:46.970988 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-utilities\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.072420 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt9ls\" (UniqueName: \"kubernetes.io/projected/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-kube-api-access-lt9ls\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.072514 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-utilities\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.072543 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-catalog-content\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.073048 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-utilities\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.073128 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-catalog-content\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.098620 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt9ls\" (UniqueName: \"kubernetes.io/projected/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-kube-api-access-lt9ls\") pod \"certified-operators-2thpr\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.146493 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.699055 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2thpr"] Feb 16 13:43:47 crc kubenswrapper[4816]: W0216 13:43:47.707966 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf6e4cd3_36d8_4179_bf70_fa7aef3cbe0b.slice/crio-ec1f03e17408f60a8779d8533cd1175f032a856fad0b0b8b197f7f0f3e645f55 WatchSource:0}: Error finding container ec1f03e17408f60a8779d8533cd1175f032a856fad0b0b8b197f7f0f3e645f55: Status 404 returned error can't find the container with id ec1f03e17408f60a8779d8533cd1175f032a856fad0b0b8b197f7f0f3e645f55 Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.944570 4816 generic.go:334] "Generic (PLEG): container finished" podID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerID="f813a3e252ab40f28b0c182d06429e0cdc31d6f6ea9bb0e320e44d8415365167" exitCode=0 Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.944623 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bb92" event={"ID":"1690ac3d-29aa-4efe-a378-5d19ff514b9a","Type":"ContainerDied","Data":"f813a3e252ab40f28b0c182d06429e0cdc31d6f6ea9bb0e320e44d8415365167"} Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.947788 4816 generic.go:334] "Generic (PLEG): container finished" podID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerID="6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6" exitCode=0 Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.947832 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2thpr" event={"ID":"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b","Type":"ContainerDied","Data":"6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6"} Feb 16 13:43:47 crc kubenswrapper[4816]: I0216 13:43:47.947853 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2thpr" event={"ID":"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b","Type":"ContainerStarted","Data":"ec1f03e17408f60a8779d8533cd1175f032a856fad0b0b8b197f7f0f3e645f55"} Feb 16 13:43:48 crc kubenswrapper[4816]: I0216 13:43:48.958273 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bb92" event={"ID":"1690ac3d-29aa-4efe-a378-5d19ff514b9a","Type":"ContainerStarted","Data":"7aec3b23c0535c2fea326ab9c07acd5020f7a3409b79da35ec27de2a75770c77"} Feb 16 13:43:48 crc kubenswrapper[4816]: I0216 13:43:48.960833 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2thpr" 
event={"ID":"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b","Type":"ContainerStarted","Data":"f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4"} Feb 16 13:43:48 crc kubenswrapper[4816]: I0216 13:43:48.983752 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8bb92" podStartSLOduration=2.592442266 podStartE2EDuration="4.983729613s" podCreationTimestamp="2026-02-16 13:43:44 +0000 UTC" firstStartedPulling="2026-02-16 13:43:45.931769483 +0000 UTC m=+2425.258483201" lastFinishedPulling="2026-02-16 13:43:48.32305682 +0000 UTC m=+2427.649770548" observedRunningTime="2026-02-16 13:43:48.976796083 +0000 UTC m=+2428.303509831" watchObservedRunningTime="2026-02-16 13:43:48.983729613 +0000 UTC m=+2428.310443351" Feb 16 13:43:49 crc kubenswrapper[4816]: I0216 13:43:49.970434 4816 generic.go:334] "Generic (PLEG): container finished" podID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerID="f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4" exitCode=0 Feb 16 13:43:49 crc kubenswrapper[4816]: I0216 13:43:49.970525 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2thpr" event={"ID":"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b","Type":"ContainerDied","Data":"f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4"} Feb 16 13:43:50 crc kubenswrapper[4816]: I0216 13:43:50.984407 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2thpr" event={"ID":"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b","Type":"ContainerStarted","Data":"395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4"} Feb 16 13:43:51 crc kubenswrapper[4816]: I0216 13:43:51.017713 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2thpr" podStartSLOduration=2.582075605 podStartE2EDuration="5.017651187s" podCreationTimestamp="2026-02-16 13:43:46 +0000 UTC" firstStartedPulling="2026-02-16 13:43:47.949000829 +0000 UTC m=+2427.275714567" lastFinishedPulling="2026-02-16 13:43:50.384576421 +0000 UTC m=+2429.711290149" observedRunningTime="2026-02-16 13:43:51.003788747 +0000 UTC m=+2430.330502485" watchObservedRunningTime="2026-02-16 13:43:51.017651187 +0000 UTC m=+2430.344364955" Feb 16 13:43:51 crc kubenswrapper[4816]: I0216 13:43:51.402294 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:43:51 crc kubenswrapper[4816]: E0216 13:43:51.402518 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:43:54 crc kubenswrapper[4816]: I0216 13:43:54.935810 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:54 crc kubenswrapper[4816]: I0216 13:43:54.936444 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:54 crc kubenswrapper[4816]: I0216 13:43:54.977551 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:55 crc kubenswrapper[4816]: I0216 13:43:55.057941 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:55 crc kubenswrapper[4816]: I0216 13:43:55.415228 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8bb92"] Feb 16 13:43:57 crc kubenswrapper[4816]: I0216 13:43:57.034476 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8bb92" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="registry-server" containerID="cri-o://7aec3b23c0535c2fea326ab9c07acd5020f7a3409b79da35ec27de2a75770c77" gracePeriod=2 Feb 16 13:43:57 crc kubenswrapper[4816]: I0216 13:43:57.145904 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:57 crc kubenswrapper[4816]: I0216 13:43:57.146119 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:57 crc kubenswrapper[4816]: I0216 13:43:57.191628 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.046382 4816 generic.go:334] "Generic (PLEG): container finished" podID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerID="7aec3b23c0535c2fea326ab9c07acd5020f7a3409b79da35ec27de2a75770c77" exitCode=0 Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.047867 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bb92" event={"ID":"1690ac3d-29aa-4efe-a378-5d19ff514b9a","Type":"ContainerDied","Data":"7aec3b23c0535c2fea326ab9c07acd5020f7a3409b79da35ec27de2a75770c77"} Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.096226 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.111843 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.236770 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pp4nb\" (UniqueName: \"kubernetes.io/projected/1690ac3d-29aa-4efe-a378-5d19ff514b9a-kube-api-access-pp4nb\") pod \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.236896 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-catalog-content\") pod \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.236984 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-utilities\") pod \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\" (UID: \"1690ac3d-29aa-4efe-a378-5d19ff514b9a\") " Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.238557 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-utilities" (OuterVolumeSpecName: "utilities") pod "1690ac3d-29aa-4efe-a378-5d19ff514b9a" (UID: "1690ac3d-29aa-4efe-a378-5d19ff514b9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.241025 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1690ac3d-29aa-4efe-a378-5d19ff514b9a-kube-api-access-pp4nb" (OuterVolumeSpecName: "kube-api-access-pp4nb") pod "1690ac3d-29aa-4efe-a378-5d19ff514b9a" (UID: "1690ac3d-29aa-4efe-a378-5d19ff514b9a"). InnerVolumeSpecName "kube-api-access-pp4nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.290193 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1690ac3d-29aa-4efe-a378-5d19ff514b9a" (UID: "1690ac3d-29aa-4efe-a378-5d19ff514b9a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.338554 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pp4nb\" (UniqueName: \"kubernetes.io/projected/1690ac3d-29aa-4efe-a378-5d19ff514b9a-kube-api-access-pp4nb\") on node \"crc\" DevicePath \"\"" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.338595 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:43:58 crc kubenswrapper[4816]: I0216 13:43:58.338609 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1690ac3d-29aa-4efe-a378-5d19ff514b9a-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.059772 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bb92" event={"ID":"1690ac3d-29aa-4efe-a378-5d19ff514b9a","Type":"ContainerDied","Data":"95d9c12fa2d23b5d7eb42b31740e0a7ae6e68d0a45f6805c50c6036f1055adaf"} Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.059853 4816 scope.go:117] "RemoveContainer" containerID="7aec3b23c0535c2fea326ab9c07acd5020f7a3409b79da35ec27de2a75770c77" Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.059857 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8bb92" Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.088406 4816 scope.go:117] "RemoveContainer" containerID="f813a3e252ab40f28b0c182d06429e0cdc31d6f6ea9bb0e320e44d8415365167" Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.107979 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8bb92"] Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.117280 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8bb92"] Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.118041 4816 scope.go:117] "RemoveContainer" containerID="51e31e2d48ad63d51b30cee41aefe71f8edb0e94b3bab239e9ad1b8b08e28349" Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.410490 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" path="/var/lib/kubelet/pods/1690ac3d-29aa-4efe-a378-5d19ff514b9a/volumes" Feb 16 13:43:59 crc kubenswrapper[4816]: I0216 13:43:59.411349 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2thpr"] Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.077914 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2thpr" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="registry-server" containerID="cri-o://395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4" gracePeriod=2 Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.485506 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.589946 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-catalog-content\") pod \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.590075 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-utilities\") pod \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.590121 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt9ls\" (UniqueName: \"kubernetes.io/projected/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-kube-api-access-lt9ls\") pod \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\" (UID: \"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b\") " Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.590835 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-utilities" (OuterVolumeSpecName: "utilities") pod "af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" (UID: "af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.597063 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-kube-api-access-lt9ls" (OuterVolumeSpecName: "kube-api-access-lt9ls") pod "af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" (UID: "af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b"). InnerVolumeSpecName "kube-api-access-lt9ls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.660258 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" (UID: "af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.691421 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.691464 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt9ls\" (UniqueName: \"kubernetes.io/projected/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-kube-api-access-lt9ls\") on node \"crc\" DevicePath \"\"" Feb 16 13:44:01 crc kubenswrapper[4816]: I0216 13:44:01.691523 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.091007 4816 generic.go:334] "Generic (PLEG): container finished" podID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerID="395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4" exitCode=0 Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.091083 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2thpr" event={"ID":"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b","Type":"ContainerDied","Data":"395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4"} Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.091110 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2thpr" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.091148 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2thpr" event={"ID":"af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b","Type":"ContainerDied","Data":"ec1f03e17408f60a8779d8533cd1175f032a856fad0b0b8b197f7f0f3e645f55"} Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.091180 4816 scope.go:117] "RemoveContainer" containerID="395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.133802 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2thpr"] Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.138915 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2thpr"] Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.139769 4816 scope.go:117] "RemoveContainer" containerID="f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.165964 4816 scope.go:117] "RemoveContainer" containerID="6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.192260 4816 scope.go:117] "RemoveContainer" containerID="395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4" Feb 16 13:44:02 crc kubenswrapper[4816]: E0216 13:44:02.192796 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4\": container with ID starting with 395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4 not found: ID does not exist" containerID="395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.192850 
4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4"} err="failed to get container status \"395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4\": rpc error: code = NotFound desc = could not find container \"395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4\": container with ID starting with 395ee3e650c7e03d9b1e219e3b04994600f01b4c29c119ff3b0ed73e065e37f4 not found: ID does not exist" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.192886 4816 scope.go:117] "RemoveContainer" containerID="f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4" Feb 16 13:44:02 crc kubenswrapper[4816]: E0216 13:44:02.193261 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4\": container with ID starting with f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4 not found: ID does not exist" containerID="f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.193307 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4"} err="failed to get container status \"f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4\": rpc error: code = NotFound desc = could not find container \"f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4\": container with ID starting with f8868c04e6c66e05d96ed893fdde103726366eb34f539c59700ad5826b6acbc4 not found: ID does not exist" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.193335 4816 scope.go:117] "RemoveContainer" containerID="6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6" Feb 16 13:44:02 crc kubenswrapper[4816]: E0216 13:44:02.193778 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6\": container with ID starting with 6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6 not found: ID does not exist" containerID="6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6" Feb 16 13:44:02 crc kubenswrapper[4816]: I0216 13:44:02.193811 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6"} err="failed to get container status \"6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6\": rpc error: code = NotFound desc = could not find container \"6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6\": container with ID starting with 6e1c2d59bce97ea1687fe39edc119c3ac1e02f5e27bc56c84650e627b6438ce6 not found: ID does not exist" Feb 16 13:44:03 crc kubenswrapper[4816]: I0216 13:44:03.406848 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" path="/var/lib/kubelet/pods/af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b/volumes" Feb 16 13:44:06 crc kubenswrapper[4816]: I0216 13:44:06.399064 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:44:06 crc kubenswrapper[4816]: E0216 13:44:06.399831 4816 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:44:17 crc kubenswrapper[4816]: I0216 13:44:17.398979 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:44:17 crc kubenswrapper[4816]: E0216 13:44:17.399765 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:44:28 crc kubenswrapper[4816]: I0216 13:44:28.399457 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:44:28 crc kubenswrapper[4816]: E0216 13:44:28.401078 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:44:39 crc kubenswrapper[4816]: I0216 13:44:39.399347 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:44:39 crc kubenswrapper[4816]: E0216 13:44:39.400089 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:44:54 crc kubenswrapper[4816]: I0216 13:44:54.399280 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:44:54 crc kubenswrapper[4816]: E0216 13:44:54.400073 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.145229 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn"] Feb 16 13:45:00 crc kubenswrapper[4816]: E0216 13:45:00.147352 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="extract-content" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.147506 4816 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="extract-content" Feb 16 13:45:00 crc kubenswrapper[4816]: E0216 13:45:00.147688 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="registry-server" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.147810 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="registry-server" Feb 16 13:45:00 crc kubenswrapper[4816]: E0216 13:45:00.147933 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="registry-server" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.148049 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="registry-server" Feb 16 13:45:00 crc kubenswrapper[4816]: E0216 13:45:00.148173 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="extract-utilities" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.148282 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="extract-utilities" Feb 16 13:45:00 crc kubenswrapper[4816]: E0216 13:45:00.148401 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="extract-content" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.148511 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="extract-content" Feb 16 13:45:00 crc kubenswrapper[4816]: E0216 13:45:00.148620 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="extract-utilities" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.148762 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="extract-utilities" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.149138 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="af6e4cd3-36d8-4179-bf70-fa7aef3cbe0b" containerName="registry-server" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.149299 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1690ac3d-29aa-4efe-a378-5d19ff514b9a" containerName="registry-server" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.150029 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.153057 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.153769 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn"] Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.154128 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.204786 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be5767d1-220b-450a-ba86-ac360d4707e4-secret-volume\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.204912 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be5767d1-220b-450a-ba86-ac360d4707e4-config-volume\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.205042 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwzjw\" (UniqueName: \"kubernetes.io/projected/be5767d1-220b-450a-ba86-ac360d4707e4-kube-api-access-hwzjw\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.305627 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwzjw\" (UniqueName: \"kubernetes.io/projected/be5767d1-220b-450a-ba86-ac360d4707e4-kube-api-access-hwzjw\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.305973 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be5767d1-220b-450a-ba86-ac360d4707e4-secret-volume\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.306123 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be5767d1-220b-450a-ba86-ac360d4707e4-config-volume\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.307346 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be5767d1-220b-450a-ba86-ac360d4707e4-config-volume\") pod 
\"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.320846 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be5767d1-220b-450a-ba86-ac360d4707e4-secret-volume\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.321652 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwzjw\" (UniqueName: \"kubernetes.io/projected/be5767d1-220b-450a-ba86-ac360d4707e4-kube-api-access-hwzjw\") pod \"collect-profiles-29520825-hjbbn\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.481611 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:00 crc kubenswrapper[4816]: I0216 13:45:00.701438 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn"] Feb 16 13:45:01 crc kubenswrapper[4816]: I0216 13:45:01.544199 4816 generic.go:334] "Generic (PLEG): container finished" podID="be5767d1-220b-450a-ba86-ac360d4707e4" containerID="cfa94495e332b1246273a57aa51fab612d4d2e7922b556b917fd591641968d1a" exitCode=0 Feb 16 13:45:01 crc kubenswrapper[4816]: I0216 13:45:01.544816 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" event={"ID":"be5767d1-220b-450a-ba86-ac360d4707e4","Type":"ContainerDied","Data":"cfa94495e332b1246273a57aa51fab612d4d2e7922b556b917fd591641968d1a"} Feb 16 13:45:01 crc kubenswrapper[4816]: I0216 13:45:01.545490 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" event={"ID":"be5767d1-220b-450a-ba86-ac360d4707e4","Type":"ContainerStarted","Data":"d9391c4c11764290e69d4c0ee4a730c3d0e9a4aa962bc640c10452663701f069"} Feb 16 13:45:02 crc kubenswrapper[4816]: I0216 13:45:02.865806 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:02 crc kubenswrapper[4816]: I0216 13:45:02.965203 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be5767d1-220b-450a-ba86-ac360d4707e4-secret-volume\") pod \"be5767d1-220b-450a-ba86-ac360d4707e4\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " Feb 16 13:45:02 crc kubenswrapper[4816]: I0216 13:45:02.965319 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwzjw\" (UniqueName: \"kubernetes.io/projected/be5767d1-220b-450a-ba86-ac360d4707e4-kube-api-access-hwzjw\") pod \"be5767d1-220b-450a-ba86-ac360d4707e4\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " Feb 16 13:45:02 crc kubenswrapper[4816]: I0216 13:45:02.965357 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be5767d1-220b-450a-ba86-ac360d4707e4-config-volume\") pod \"be5767d1-220b-450a-ba86-ac360d4707e4\" (UID: \"be5767d1-220b-450a-ba86-ac360d4707e4\") " Feb 16 13:45:02 crc kubenswrapper[4816]: I0216 13:45:02.966467 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be5767d1-220b-450a-ba86-ac360d4707e4-config-volume" (OuterVolumeSpecName: "config-volume") pod "be5767d1-220b-450a-ba86-ac360d4707e4" (UID: "be5767d1-220b-450a-ba86-ac360d4707e4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 13:45:02 crc kubenswrapper[4816]: I0216 13:45:02.970816 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be5767d1-220b-450a-ba86-ac360d4707e4-kube-api-access-hwzjw" (OuterVolumeSpecName: "kube-api-access-hwzjw") pod "be5767d1-220b-450a-ba86-ac360d4707e4" (UID: "be5767d1-220b-450a-ba86-ac360d4707e4"). InnerVolumeSpecName "kube-api-access-hwzjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:45:02 crc kubenswrapper[4816]: I0216 13:45:02.970887 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be5767d1-220b-450a-ba86-ac360d4707e4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "be5767d1-220b-450a-ba86-ac360d4707e4" (UID: "be5767d1-220b-450a-ba86-ac360d4707e4"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.067629 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwzjw\" (UniqueName: \"kubernetes.io/projected/be5767d1-220b-450a-ba86-ac360d4707e4-kube-api-access-hwzjw\") on node \"crc\" DevicePath \"\"" Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.067698 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/be5767d1-220b-450a-ba86-ac360d4707e4-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.067716 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/be5767d1-220b-450a-ba86-ac360d4707e4-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.614364 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" event={"ID":"be5767d1-220b-450a-ba86-ac360d4707e4","Type":"ContainerDied","Data":"d9391c4c11764290e69d4c0ee4a730c3d0e9a4aa962bc640c10452663701f069"} Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.614428 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9391c4c11764290e69d4c0ee4a730c3d0e9a4aa962bc640c10452663701f069" Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.614503 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn" Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.940025 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88"] Feb 16 13:45:03 crc kubenswrapper[4816]: I0216 13:45:03.947314 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520780-nkg88"] Feb 16 13:45:05 crc kubenswrapper[4816]: I0216 13:45:05.399495 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:45:05 crc kubenswrapper[4816]: E0216 13:45:05.400015 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:45:05 crc kubenswrapper[4816]: I0216 13:45:05.412282 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e34f8aa9-54c5-4964-a481-ff6745ec54d8" path="/var/lib/kubelet/pods/e34f8aa9-54c5-4964-a481-ff6745ec54d8/volumes" Feb 16 13:45:19 crc kubenswrapper[4816]: I0216 13:45:19.399491 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:45:19 crc kubenswrapper[4816]: E0216 13:45:19.400951 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:45:26 crc kubenswrapper[4816]: I0216 13:45:26.178834 4816 scope.go:117] "RemoveContainer" containerID="14530c301f50b9ac1dac510e1d92fcd118b840d004c315d661a4f261c670ab7f" Feb 16 13:45:31 crc kubenswrapper[4816]: I0216 13:45:31.402523 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:45:31 crc kubenswrapper[4816]: E0216 13:45:31.403241 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:45:44 crc kubenswrapper[4816]: I0216 13:45:44.399818 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:45:44 crc kubenswrapper[4816]: E0216 13:45:44.400994 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:45:59 crc kubenswrapper[4816]: I0216 13:45:59.398213 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:45:59 crc kubenswrapper[4816]: E0216 13:45:59.398939 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:46:10 crc kubenswrapper[4816]: I0216 13:46:10.399131 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:46:10 crc kubenswrapper[4816]: E0216 13:46:10.400032 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:46:23 crc kubenswrapper[4816]: I0216 13:46:23.399958 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:46:23 crc kubenswrapper[4816]: E0216 13:46:23.402581 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:46:37 crc kubenswrapper[4816]: I0216 13:46:37.400376 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:46:37 crc kubenswrapper[4816]: E0216 13:46:37.402226 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:46:37 crc kubenswrapper[4816]: I0216 13:46:37.995195 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vrhrn"] Feb 16 13:46:37 crc kubenswrapper[4816]: E0216 13:46:37.995631 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be5767d1-220b-450a-ba86-ac360d4707e4" containerName="collect-profiles" Feb 16 13:46:37 crc kubenswrapper[4816]: I0216 13:46:37.995650 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="be5767d1-220b-450a-ba86-ac360d4707e4" containerName="collect-profiles" Feb 16 13:46:37 crc kubenswrapper[4816]: I0216 13:46:37.995857 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="be5767d1-220b-450a-ba86-ac360d4707e4" containerName="collect-profiles" Feb 16 13:46:37 crc kubenswrapper[4816]: I0216 13:46:37.998677 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.050598 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrhrn"] Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.139633 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-catalog-content\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.139730 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-utilities\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.139821 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lv7p\" (UniqueName: \"kubernetes.io/projected/c34c3787-74ba-4760-b383-c49fe91adac2-kube-api-access-6lv7p\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.241705 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-catalog-content\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " 
pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.241751 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-utilities\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.241799 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lv7p\" (UniqueName: \"kubernetes.io/projected/c34c3787-74ba-4760-b383-c49fe91adac2-kube-api-access-6lv7p\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.242403 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-catalog-content\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.242457 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-utilities\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.262026 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lv7p\" (UniqueName: \"kubernetes.io/projected/c34c3787-74ba-4760-b383-c49fe91adac2-kube-api-access-6lv7p\") pod \"redhat-marketplace-vrhrn\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.345089 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:38 crc kubenswrapper[4816]: I0216 13:46:38.582580 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrhrn"] Feb 16 13:46:39 crc kubenswrapper[4816]: I0216 13:46:39.402522 4816 generic.go:334] "Generic (PLEG): container finished" podID="c34c3787-74ba-4760-b383-c49fe91adac2" containerID="d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464" exitCode=0 Feb 16 13:46:39 crc kubenswrapper[4816]: I0216 13:46:39.411270 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrhrn" event={"ID":"c34c3787-74ba-4760-b383-c49fe91adac2","Type":"ContainerDied","Data":"d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464"} Feb 16 13:46:39 crc kubenswrapper[4816]: I0216 13:46:39.411321 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrhrn" event={"ID":"c34c3787-74ba-4760-b383-c49fe91adac2","Type":"ContainerStarted","Data":"45328f644bb52c351d383e2122b63e68b8f11b9d6b21bee0d7f45ede70bdab3c"} Feb 16 13:46:41 crc kubenswrapper[4816]: I0216 13:46:41.416021 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrhrn" event={"ID":"c34c3787-74ba-4760-b383-c49fe91adac2","Type":"ContainerStarted","Data":"5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de"} Feb 16 13:46:42 crc kubenswrapper[4816]: I0216 13:46:42.430464 4816 generic.go:334] "Generic (PLEG): container finished" podID="c34c3787-74ba-4760-b383-c49fe91adac2" containerID="5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de" exitCode=0 Feb 16 13:46:42 crc kubenswrapper[4816]: I0216 13:46:42.430547 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrhrn" event={"ID":"c34c3787-74ba-4760-b383-c49fe91adac2","Type":"ContainerDied","Data":"5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de"} Feb 16 13:46:43 crc kubenswrapper[4816]: I0216 13:46:43.438918 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrhrn" event={"ID":"c34c3787-74ba-4760-b383-c49fe91adac2","Type":"ContainerStarted","Data":"af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8"} Feb 16 13:46:43 crc kubenswrapper[4816]: I0216 13:46:43.460126 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vrhrn" podStartSLOduration=2.945020824 podStartE2EDuration="6.460100434s" podCreationTimestamp="2026-02-16 13:46:37 +0000 UTC" firstStartedPulling="2026-02-16 13:46:39.404982481 +0000 UTC m=+2598.731696239" lastFinishedPulling="2026-02-16 13:46:42.920062111 +0000 UTC m=+2602.246775849" observedRunningTime="2026-02-16 13:46:43.455704933 +0000 UTC m=+2602.782418691" watchObservedRunningTime="2026-02-16 13:46:43.460100434 +0000 UTC m=+2602.786814182" Feb 16 13:46:48 crc kubenswrapper[4816]: I0216 13:46:48.345840 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:48 crc kubenswrapper[4816]: I0216 13:46:48.346082 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:48 crc kubenswrapper[4816]: I0216 13:46:48.411482 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:48 crc kubenswrapper[4816]: I0216 13:46:48.525190 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:48 crc kubenswrapper[4816]: I0216 13:46:48.652044 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrhrn"] Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.493937 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vrhrn" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="registry-server" containerID="cri-o://af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8" gracePeriod=2 Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.895407 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.929115 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-catalog-content\") pod \"c34c3787-74ba-4760-b383-c49fe91adac2\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.929179 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-utilities\") pod \"c34c3787-74ba-4760-b383-c49fe91adac2\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.929260 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lv7p\" (UniqueName: \"kubernetes.io/projected/c34c3787-74ba-4760-b383-c49fe91adac2-kube-api-access-6lv7p\") pod \"c34c3787-74ba-4760-b383-c49fe91adac2\" (UID: \"c34c3787-74ba-4760-b383-c49fe91adac2\") " Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.930026 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-utilities" (OuterVolumeSpecName: "utilities") pod "c34c3787-74ba-4760-b383-c49fe91adac2" (UID: "c34c3787-74ba-4760-b383-c49fe91adac2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.935061 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c34c3787-74ba-4760-b383-c49fe91adac2-kube-api-access-6lv7p" (OuterVolumeSpecName: "kube-api-access-6lv7p") pod "c34c3787-74ba-4760-b383-c49fe91adac2" (UID: "c34c3787-74ba-4760-b383-c49fe91adac2"). InnerVolumeSpecName "kube-api-access-6lv7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:46:50 crc kubenswrapper[4816]: I0216 13:46:50.971819 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c34c3787-74ba-4760-b383-c49fe91adac2" (UID: "c34c3787-74ba-4760-b383-c49fe91adac2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.030865 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.030893 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c34c3787-74ba-4760-b383-c49fe91adac2-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.030906 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lv7p\" (UniqueName: \"kubernetes.io/projected/c34c3787-74ba-4760-b383-c49fe91adac2-kube-api-access-6lv7p\") on node \"crc\" DevicePath \"\"" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.403157 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:46:51 crc kubenswrapper[4816]: E0216 13:46:51.403685 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.503966 4816 generic.go:334] "Generic (PLEG): container finished" podID="c34c3787-74ba-4760-b383-c49fe91adac2" containerID="af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8" exitCode=0 Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.504019 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrhrn" event={"ID":"c34c3787-74ba-4760-b383-c49fe91adac2","Type":"ContainerDied","Data":"af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8"} Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.504038 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrhrn" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.504056 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrhrn" event={"ID":"c34c3787-74ba-4760-b383-c49fe91adac2","Type":"ContainerDied","Data":"45328f644bb52c351d383e2122b63e68b8f11b9d6b21bee0d7f45ede70bdab3c"} Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.504077 4816 scope.go:117] "RemoveContainer" containerID="af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.536143 4816 scope.go:117] "RemoveContainer" containerID="5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.537365 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrhrn"] Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.546693 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrhrn"] Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.561430 4816 scope.go:117] "RemoveContainer" containerID="d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.588015 4816 scope.go:117] "RemoveContainer" containerID="af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8" Feb 16 13:46:51 crc kubenswrapper[4816]: E0216 13:46:51.588433 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8\": container with ID starting with af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8 not found: ID does not exist" containerID="af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.588474 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8"} err="failed to get container status \"af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8\": rpc error: code = NotFound desc = could not find container \"af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8\": container with ID starting with af31eb1b0703f5568b99ab5d68101fec3834e62849cc4e23c24e10977f4e99b8 not found: ID does not exist" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.588498 4816 scope.go:117] "RemoveContainer" containerID="5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de" Feb 16 13:46:51 crc kubenswrapper[4816]: E0216 13:46:51.588754 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de\": container with ID starting with 5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de not found: ID does not exist" containerID="5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.588795 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de"} err="failed to get container status \"5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de\": rpc error: code = NotFound desc = could not find 
container \"5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de\": container with ID starting with 5f8686557668e47bc4742e99e6c3abadb18e0346f3b411b95b6eb52edb5910de not found: ID does not exist" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.588821 4816 scope.go:117] "RemoveContainer" containerID="d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464" Feb 16 13:46:51 crc kubenswrapper[4816]: E0216 13:46:51.589082 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464\": container with ID starting with d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464 not found: ID does not exist" containerID="d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464" Feb 16 13:46:51 crc kubenswrapper[4816]: I0216 13:46:51.589125 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464"} err="failed to get container status \"d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464\": rpc error: code = NotFound desc = could not find container \"d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464\": container with ID starting with d6beb47952446516a33b5e2e89c4b2fe56a3c2b70a943848d5f4595f5282f464 not found: ID does not exist" Feb 16 13:46:53 crc kubenswrapper[4816]: I0216 13:46:53.407084 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" path="/var/lib/kubelet/pods/c34c3787-74ba-4760-b383-c49fe91adac2/volumes" Feb 16 13:47:06 crc kubenswrapper[4816]: I0216 13:47:06.399442 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:47:06 crc kubenswrapper[4816]: E0216 13:47:06.400585 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:47:18 crc kubenswrapper[4816]: I0216 13:47:18.399129 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:47:18 crc kubenswrapper[4816]: E0216 13:47:18.400004 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:47:31 crc kubenswrapper[4816]: I0216 13:47:31.405934 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:47:31 crc kubenswrapper[4816]: E0216 13:47:31.406915 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Feb 16 13:47:06 crc kubenswrapper[4816]: I0216 13:47:06.399442 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:47:06 crc kubenswrapper[4816]: E0216 13:47:06.400585 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:47:18 crc kubenswrapper[4816]: I0216 13:47:18.399129 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:47:18 crc kubenswrapper[4816]: E0216 13:47:18.400004 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:47:31 crc kubenswrapper[4816]: I0216 13:47:31.405934 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:47:31 crc kubenswrapper[4816]: E0216 13:47:31.406915 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:47:45 crc kubenswrapper[4816]: I0216 13:47:45.399133 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:47:45 crc kubenswrapper[4816]: E0216 13:47:45.399965 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:47:57 crc kubenswrapper[4816]: I0216 13:47:57.398446 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:47:57 crc kubenswrapper[4816]: E0216 13:47:57.399217 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:48:10 crc kubenswrapper[4816]: I0216 13:48:10.399075 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:48:10 crc kubenswrapper[4816]: E0216 13:48:10.399838 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:48:21 crc kubenswrapper[4816]: I0216 13:48:21.404134 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:48:21 crc kubenswrapper[4816]: E0216 13:48:21.405233 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:48:36 crc kubenswrapper[4816]: I0216 13:48:36.398213 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:48:36 crc kubenswrapper[4816]: E0216 13:48:36.398913 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:48:51 crc kubenswrapper[4816]: I0216 13:48:51.402565 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a"
Feb 16 13:48:52 crc kubenswrapper[4816]: I0216 13:48:52.472285 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"1b44e621f66d5c607087cc8c5f5e81b0f4e01c9a3aaa1ee8f561d1323e3679e6"}
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.363202 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9xbhf"]
Feb 16 13:49:55 crc kubenswrapper[4816]: E0216 13:49:55.364626 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="registry-server"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.364703 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="registry-server"
Feb 16 13:49:55 crc kubenswrapper[4816]: E0216 13:49:55.364746 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="extract-utilities"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.364762 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="extract-utilities"
Feb 16 13:49:55 crc kubenswrapper[4816]: E0216 13:49:55.364791 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="extract-content"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.364806 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="extract-content"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.365134 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c34c3787-74ba-4760-b383-c49fe91adac2" containerName="registry-server"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.367127 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.380061 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9xbhf"]
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.567760 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-utilities\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.567890 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfhnm\" (UniqueName: \"kubernetes.io/projected/eef528a1-18a1-4daf-8c16-face155e99b3-kube-api-access-rfhnm\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.568024 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-catalog-content\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.669096 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-catalog-content\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.669193 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-utilities\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.669217 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfhnm\" (UniqueName: \"kubernetes.io/projected/eef528a1-18a1-4daf-8c16-face155e99b3-kube-api-access-rfhnm\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.669864 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-utilities\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.669920 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-catalog-content\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.692568 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfhnm\" (UniqueName: \"kubernetes.io/projected/eef528a1-18a1-4daf-8c16-face155e99b3-kube-api-access-rfhnm\") pod \"redhat-operators-9xbhf\" (UID: \"eef528a1-18a1-4daf-8c16-face155e99b3\") " pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:55 crc kubenswrapper[4816]: I0216 13:49:55.708497 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:49:56 crc kubenswrapper[4816]: I0216 13:49:56.168209 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9xbhf"]
Feb 16 13:49:57 crc kubenswrapper[4816]: I0216 13:49:57.113252 4816 generic.go:334] "Generic (PLEG): container finished" podID="eef528a1-18a1-4daf-8c16-face155e99b3" containerID="a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64" exitCode=0
Feb 16 13:49:57 crc kubenswrapper[4816]: I0216 13:49:57.113503 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xbhf" event={"ID":"eef528a1-18a1-4daf-8c16-face155e99b3","Type":"ContainerDied","Data":"a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64"}
Feb 16 13:49:57 crc kubenswrapper[4816]: I0216 13:49:57.113527 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xbhf" event={"ID":"eef528a1-18a1-4daf-8c16-face155e99b3","Type":"ContainerStarted","Data":"857fb0d1e91c5ef148ea5e12960b1004b244465eb36f5a3c8d5f6002815f2851"}
Feb 16 13:49:57 crc kubenswrapper[4816]: I0216 13:49:57.117004 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 16 13:49:58 crc kubenswrapper[4816]: I0216 13:49:58.122667 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xbhf" event={"ID":"eef528a1-18a1-4daf-8c16-face155e99b3","Type":"ContainerStarted","Data":"c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d"}
Feb 16 13:49:59 crc kubenswrapper[4816]: I0216 13:49:59.133101 4816 generic.go:334] "Generic (PLEG): container finished" podID="eef528a1-18a1-4daf-8c16-face155e99b3" containerID="c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d" exitCode=0
Feb 16 13:49:59 crc kubenswrapper[4816]: I0216 13:49:59.133167 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xbhf" event={"ID":"eef528a1-18a1-4daf-8c16-face155e99b3","Type":"ContainerDied","Data":"c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d"}
Feb 16 13:50:00 crc kubenswrapper[4816]: I0216 13:50:00.142471 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xbhf" event={"ID":"eef528a1-18a1-4daf-8c16-face155e99b3","Type":"ContainerStarted","Data":"756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9"}
Feb 16 13:50:00 crc kubenswrapper[4816]: I0216 13:50:00.161640 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9xbhf" podStartSLOduration=2.634383532 podStartE2EDuration="5.161624725s" podCreationTimestamp="2026-02-16 13:49:55 +0000 UTC" firstStartedPulling="2026-02-16 13:49:57.116543257 +0000 UTC m=+2796.443257015" lastFinishedPulling="2026-02-16 13:49:59.64378448 +0000 UTC m=+2798.970498208" observedRunningTime="2026-02-16 13:50:00.160264307 +0000 UTC m=+2799.486978045" watchObservedRunningTime="2026-02-16 13:50:00.161624725 +0000 UTC m=+2799.488338453"
Feb 16 13:50:05 crc kubenswrapper[4816]: I0216 13:50:05.708961 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:50:05 crc kubenswrapper[4816]: I0216 13:50:05.711535 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9xbhf"
Feb 16 13:50:06 crc kubenswrapper[4816]: I0216 13:50:06.774873 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9xbhf" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="registry-server" probeResult="failure" output=<
Feb 16 13:50:06 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s
Feb 16 13:50:06 crc kubenswrapper[4816]: >
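Annotation: the multi-line probeResult output above is the registry-server container's startup probe timing out against port 50051 within its 1s budget; nine seconds later the same probe reports started. A rough stdlib-Python stand-in for that connectivity check (the real probe is a gRPC health check, and the pod IP is not shown in this excerpt, so the host below is a placeholder):

    import socket

    # Can a TCP connection to the registry port be opened within 1s?
    # Mimics only the connect step of the probe, not the gRPC handshake.
    def tcp_probe(host, port=50051, timeout=1.0):
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            return False

    print(tcp_probe("127.0.0.1"))  # placeholder host; False until the server listens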
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:50:17 crc kubenswrapper[4816]: I0216 13:50:17.850884 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eef528a1-18a1-4daf-8c16-face155e99b3-kube-api-access-rfhnm" (OuterVolumeSpecName: "kube-api-access-rfhnm") pod "eef528a1-18a1-4daf-8c16-face155e99b3" (UID: "eef528a1-18a1-4daf-8c16-face155e99b3"). InnerVolumeSpecName "kube-api-access-rfhnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:50:17 crc kubenswrapper[4816]: I0216 13:50:17.944573 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfhnm\" (UniqueName: \"kubernetes.io/projected/eef528a1-18a1-4daf-8c16-face155e99b3-kube-api-access-rfhnm\") on node \"crc\" DevicePath \"\"" Feb 16 13:50:17 crc kubenswrapper[4816]: I0216 13:50:17.944632 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:50:17 crc kubenswrapper[4816]: I0216 13:50:17.997644 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eef528a1-18a1-4daf-8c16-face155e99b3" (UID: "eef528a1-18a1-4daf-8c16-face155e99b3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.045803 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef528a1-18a1-4daf-8c16-face155e99b3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.290507 4816 generic.go:334] "Generic (PLEG): container finished" podID="eef528a1-18a1-4daf-8c16-face155e99b3" containerID="756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9" exitCode=0 Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.290574 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xbhf" event={"ID":"eef528a1-18a1-4daf-8c16-face155e99b3","Type":"ContainerDied","Data":"756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9"} Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.290600 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9xbhf" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.290631 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xbhf" event={"ID":"eef528a1-18a1-4daf-8c16-face155e99b3","Type":"ContainerDied","Data":"857fb0d1e91c5ef148ea5e12960b1004b244465eb36f5a3c8d5f6002815f2851"} Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.290696 4816 scope.go:117] "RemoveContainer" containerID="756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.322062 4816 scope.go:117] "RemoveContainer" containerID="c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.336920 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9xbhf"] Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.346872 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9xbhf"] Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.366108 4816 scope.go:117] "RemoveContainer" containerID="a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.389157 4816 scope.go:117] "RemoveContainer" containerID="756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9" Feb 16 13:50:18 crc kubenswrapper[4816]: E0216 13:50:18.409941 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9\": container with ID starting with 756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9 not found: ID does not exist" containerID="756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.410128 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9"} err="failed to get container status \"756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9\": rpc error: code = NotFound desc = could not find container \"756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9\": container with ID starting with 756a5d32f279c2c55153e0d81922ba0a81df9b4cce51f7b75676be762ca2e6b9 not found: ID does not exist" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.410183 4816 scope.go:117] "RemoveContainer" containerID="c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d" Feb 16 13:50:18 crc kubenswrapper[4816]: E0216 13:50:18.410939 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d\": container with ID starting with c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d not found: ID does not exist" containerID="c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.410998 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d"} err="failed to get container status \"c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d\": rpc error: code = NotFound desc = could not find container 
\"c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d\": container with ID starting with c74221fdc346d7f86df2369766344ae28f351e9ffd3ff214c323735be42d9e8d not found: ID does not exist" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.411032 4816 scope.go:117] "RemoveContainer" containerID="a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64" Feb 16 13:50:18 crc kubenswrapper[4816]: E0216 13:50:18.412560 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64\": container with ID starting with a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64 not found: ID does not exist" containerID="a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64" Feb 16 13:50:18 crc kubenswrapper[4816]: I0216 13:50:18.412617 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64"} err="failed to get container status \"a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64\": rpc error: code = NotFound desc = could not find container \"a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64\": container with ID starting with a65ec4217ccff6c3e63f84572803f161f3cbf4c478c32260c43b5d41b7ff2f64 not found: ID does not exist" Feb 16 13:50:19 crc kubenswrapper[4816]: I0216 13:50:19.414224 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" path="/var/lib/kubelet/pods/eef528a1-18a1-4daf-8c16-face155e99b3/volumes" Feb 16 13:51:06 crc kubenswrapper[4816]: I0216 13:51:06.941607 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:51:06 crc kubenswrapper[4816]: I0216 13:51:06.943972 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:51:36 crc kubenswrapper[4816]: I0216 13:51:36.941060 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:51:36 crc kubenswrapper[4816]: I0216 13:51:36.941534 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:52:06 crc kubenswrapper[4816]: I0216 13:52:06.941238 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:52:06 crc 
kubenswrapper[4816]: I0216 13:52:06.941966 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:52:06 crc kubenswrapper[4816]: I0216 13:52:06.942040 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:52:06 crc kubenswrapper[4816]: I0216 13:52:06.942791 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1b44e621f66d5c607087cc8c5f5e81b0f4e01c9a3aaa1ee8f561d1323e3679e6"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:52:06 crc kubenswrapper[4816]: I0216 13:52:06.942859 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://1b44e621f66d5c607087cc8c5f5e81b0f4e01c9a3aaa1ee8f561d1323e3679e6" gracePeriod=600 Feb 16 13:52:07 crc kubenswrapper[4816]: I0216 13:52:07.106575 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="1b44e621f66d5c607087cc8c5f5e81b0f4e01c9a3aaa1ee8f561d1323e3679e6" exitCode=0 Feb 16 13:52:07 crc kubenswrapper[4816]: I0216 13:52:07.106619 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"1b44e621f66d5c607087cc8c5f5e81b0f4e01c9a3aaa1ee8f561d1323e3679e6"} Feb 16 13:52:07 crc kubenswrapper[4816]: I0216 13:52:07.106652 4816 scope.go:117] "RemoveContainer" containerID="ae8c5802d3ca99d28a581208e156dfee61193b22a25d7a19e3d5608851494b8a" Feb 16 13:52:08 crc kubenswrapper[4816]: I0216 13:52:08.118164 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"} Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.355112 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zgb5t"] Feb 16 13:54:19 crc kubenswrapper[4816]: E0216 13:54:19.356039 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="extract-content" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.356062 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="extract-content" Feb 16 13:54:19 crc kubenswrapper[4816]: E0216 13:54:19.356107 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="extract-utilities" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.356115 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="extract-utilities" Feb 16 13:54:19 crc kubenswrapper[4816]: E0216 13:54:19.356130 4816 
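Annotation: the liveness failures above are plain HTTP GETs against the machine-config-daemon health endpoint named in the log (http://127.0.0.1:8798/health); the failure at 13:52:06 is the one that flips the probe to unhealthy and triggers the "will be restarted" kill with the container's 600s grace period, consistent with the Kubernetes default failureThreshold of 3 (the pod spec is not in this excerpt, so that threshold is an assumption). A minimal stdlib-Python stand-in for the check:

    import urllib.request
    import urllib.error

    # Stand-in for the kubelet's HTTP liveness probe. Connection refused,
    # as in the entries above, counts as a failure.
    def http_probe(url="http://127.0.0.1:8798/health", timeout=1.0):
        try:
            with urllib.request.urlopen(url, timeout=timeout) as resp:
                return 200 <= resp.status < 400
        except (urllib.error.URLError, OSError):
            return False

    failures = sum(1 for _ in range(3) if not http_probe())
    if failures >= 3:  # assumed default failureThreshold
        print("liveness failed; container would be restarted")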
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="registry-server" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.356138 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="registry-server" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.356322 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="eef528a1-18a1-4daf-8c16-face155e99b3" containerName="registry-server" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.357533 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.369155 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgb5t"] Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.437137 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-catalog-content\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.437335 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-utilities\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.437370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjwdd\" (UniqueName: \"kubernetes.io/projected/92bd01d5-9432-4b43-9ab2-27692882e5aa-kube-api-access-xjwdd\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.539460 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-utilities\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.539527 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjwdd\" (UniqueName: \"kubernetes.io/projected/92bd01d5-9432-4b43-9ab2-27692882e5aa-kube-api-access-xjwdd\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.539569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-catalog-content\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.542459 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-catalog-content\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.542453 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-utilities\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.567676 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjwdd\" (UniqueName: \"kubernetes.io/projected/92bd01d5-9432-4b43-9ab2-27692882e5aa-kube-api-access-xjwdd\") pod \"certified-operators-zgb5t\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:19 crc kubenswrapper[4816]: I0216 13:54:19.724618 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:20 crc kubenswrapper[4816]: I0216 13:54:20.172005 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgb5t"] Feb 16 13:54:20 crc kubenswrapper[4816]: I0216 13:54:20.467568 4816 generic.go:334] "Generic (PLEG): container finished" podID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerID="3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7" exitCode=0 Feb 16 13:54:20 crc kubenswrapper[4816]: I0216 13:54:20.467636 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgb5t" event={"ID":"92bd01d5-9432-4b43-9ab2-27692882e5aa","Type":"ContainerDied","Data":"3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7"} Feb 16 13:54:20 crc kubenswrapper[4816]: I0216 13:54:20.467897 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgb5t" event={"ID":"92bd01d5-9432-4b43-9ab2-27692882e5aa","Type":"ContainerStarted","Data":"8014ce9a603b7a8ac7a61c7e47027c17ed34454e577e25c722c4a9bdd591682c"} Feb 16 13:54:21 crc kubenswrapper[4816]: I0216 13:54:21.476435 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgb5t" event={"ID":"92bd01d5-9432-4b43-9ab2-27692882e5aa","Type":"ContainerStarted","Data":"21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1"} Feb 16 13:54:22 crc kubenswrapper[4816]: I0216 13:54:22.485002 4816 generic.go:334] "Generic (PLEG): container finished" podID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerID="21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1" exitCode=0 Feb 16 13:54:22 crc kubenswrapper[4816]: I0216 13:54:22.485102 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgb5t" event={"ID":"92bd01d5-9432-4b43-9ab2-27692882e5aa","Type":"ContainerDied","Data":"21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1"} Feb 16 13:54:23 crc kubenswrapper[4816]: I0216 13:54:23.494284 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgb5t" event={"ID":"92bd01d5-9432-4b43-9ab2-27692882e5aa","Type":"ContainerStarted","Data":"9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892"} Feb 16 13:54:23 crc kubenswrapper[4816]: I0216 13:54:23.512028 
4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zgb5t" podStartSLOduration=2.043413078 podStartE2EDuration="4.512006786s" podCreationTimestamp="2026-02-16 13:54:19 +0000 UTC" firstStartedPulling="2026-02-16 13:54:20.469418323 +0000 UTC m=+3059.796132091" lastFinishedPulling="2026-02-16 13:54:22.938012031 +0000 UTC m=+3062.264725799" observedRunningTime="2026-02-16 13:54:23.508935403 +0000 UTC m=+3062.835649131" watchObservedRunningTime="2026-02-16 13:54:23.512006786 +0000 UTC m=+3062.838720514" Feb 16 13:54:29 crc kubenswrapper[4816]: I0216 13:54:29.725261 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:29 crc kubenswrapper[4816]: I0216 13:54:29.725873 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:29 crc kubenswrapper[4816]: I0216 13:54:29.783955 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:30 crc kubenswrapper[4816]: I0216 13:54:30.624382 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:30 crc kubenswrapper[4816]: I0216 13:54:30.690512 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgb5t"] Feb 16 13:54:32 crc kubenswrapper[4816]: I0216 13:54:32.558521 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zgb5t" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="registry-server" containerID="cri-o://9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892" gracePeriod=2 Feb 16 13:54:32 crc kubenswrapper[4816]: W0216 13:54:32.667132 4816 container.go:586] Failed to update stats for container "/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92bd01d5_9432_4b43_9ab2_27692882e5aa.slice/crio-8014ce9a603b7a8ac7a61c7e47027c17ed34454e577e25c722c4a9bdd591682c": error while statting cgroup v2: [unable to parse /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92bd01d5_9432_4b43_9ab2_27692882e5aa.slice/crio-8014ce9a603b7a8ac7a61c7e47027c17ed34454e577e25c722c4a9bdd591682c/memory.stat: read /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92bd01d5_9432_4b43_9ab2_27692882e5aa.slice/crio-8014ce9a603b7a8ac7a61c7e47027c17ed34454e577e25c722c4a9bdd591682c/memory.stat: no such device], continuing to push stats Feb 16 13:54:32 crc kubenswrapper[4816]: I0216 13:54:32.933716 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.058998 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-utilities\") pod \"92bd01d5-9432-4b43-9ab2-27692882e5aa\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.059056 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjwdd\" (UniqueName: \"kubernetes.io/projected/92bd01d5-9432-4b43-9ab2-27692882e5aa-kube-api-access-xjwdd\") pod \"92bd01d5-9432-4b43-9ab2-27692882e5aa\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.059144 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-catalog-content\") pod \"92bd01d5-9432-4b43-9ab2-27692882e5aa\" (UID: \"92bd01d5-9432-4b43-9ab2-27692882e5aa\") " Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.060598 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-utilities" (OuterVolumeSpecName: "utilities") pod "92bd01d5-9432-4b43-9ab2-27692882e5aa" (UID: "92bd01d5-9432-4b43-9ab2-27692882e5aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.070215 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92bd01d5-9432-4b43-9ab2-27692882e5aa-kube-api-access-xjwdd" (OuterVolumeSpecName: "kube-api-access-xjwdd") pod "92bd01d5-9432-4b43-9ab2-27692882e5aa" (UID: "92bd01d5-9432-4b43-9ab2-27692882e5aa"). InnerVolumeSpecName "kube-api-access-xjwdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.120549 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "92bd01d5-9432-4b43-9ab2-27692882e5aa" (UID: "92bd01d5-9432-4b43-9ab2-27692882e5aa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.161026 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.161086 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjwdd\" (UniqueName: \"kubernetes.io/projected/92bd01d5-9432-4b43-9ab2-27692882e5aa-kube-api-access-xjwdd\") on node \"crc\" DevicePath \"\"" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.161108 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92bd01d5-9432-4b43-9ab2-27692882e5aa-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.565549 4816 generic.go:334] "Generic (PLEG): container finished" podID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerID="9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892" exitCode=0 Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.565596 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgb5t" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.565588 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgb5t" event={"ID":"92bd01d5-9432-4b43-9ab2-27692882e5aa","Type":"ContainerDied","Data":"9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892"} Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.566737 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgb5t" event={"ID":"92bd01d5-9432-4b43-9ab2-27692882e5aa","Type":"ContainerDied","Data":"8014ce9a603b7a8ac7a61c7e47027c17ed34454e577e25c722c4a9bdd591682c"} Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.566766 4816 scope.go:117] "RemoveContainer" containerID="9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.585648 4816 scope.go:117] "RemoveContainer" containerID="21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.587268 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgb5t"] Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.592633 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zgb5t"] Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.605476 4816 scope.go:117] "RemoveContainer" containerID="3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.636410 4816 scope.go:117] "RemoveContainer" containerID="9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892" Feb 16 13:54:33 crc kubenswrapper[4816]: E0216 13:54:33.637027 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892\": container with ID starting with 9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892 not found: ID does not exist" containerID="9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.637071 
4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892"} err="failed to get container status \"9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892\": rpc error: code = NotFound desc = could not find container \"9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892\": container with ID starting with 9d050c715ff086357f4e0a6dbb959f5d80dba8583c352a32a1e2b66bfbe8c892 not found: ID does not exist" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.637098 4816 scope.go:117] "RemoveContainer" containerID="21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1" Feb 16 13:54:33 crc kubenswrapper[4816]: E0216 13:54:33.637526 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1\": container with ID starting with 21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1 not found: ID does not exist" containerID="21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.637582 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1"} err="failed to get container status \"21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1\": rpc error: code = NotFound desc = could not find container \"21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1\": container with ID starting with 21278a16f7d03c1e96ffa128ee2f0cdb783494961bdafeb58e45deba2fcc07a1 not found: ID does not exist" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.637620 4816 scope.go:117] "RemoveContainer" containerID="3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7" Feb 16 13:54:33 crc kubenswrapper[4816]: E0216 13:54:33.637950 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7\": container with ID starting with 3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7 not found: ID does not exist" containerID="3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7" Feb 16 13:54:33 crc kubenswrapper[4816]: I0216 13:54:33.637986 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7"} err="failed to get container status \"3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7\": rpc error: code = NotFound desc = could not find container \"3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7\": container with ID starting with 3b076aba139817107aab1b1aabb74599a0b60840982179d14df7fd98180de6a7 not found: ID does not exist" Feb 16 13:54:35 crc kubenswrapper[4816]: I0216 13:54:35.407580 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" path="/var/lib/kubelet/pods/92bd01d5-9432-4b43-9ab2-27692882e5aa/volumes" Feb 16 13:54:36 crc kubenswrapper[4816]: I0216 13:54:36.940470 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:54:36 crc kubenswrapper[4816]: I0216 13:54:36.940535 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:55:06 crc kubenswrapper[4816]: I0216 13:55:06.940983 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:55:06 crc kubenswrapper[4816]: I0216 13:55:06.941571 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:55:36 crc kubenswrapper[4816]: I0216 13:55:36.940562 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 13:55:36 crc kubenswrapper[4816]: I0216 13:55:36.941190 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 13:55:36 crc kubenswrapper[4816]: I0216 13:55:36.941231 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 13:55:36 crc kubenswrapper[4816]: I0216 13:55:36.941870 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 13:55:36 crc kubenswrapper[4816]: I0216 13:55:36.941956 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" gracePeriod=600 Feb 16 13:55:37 crc kubenswrapper[4816]: E0216 13:55:37.061738 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:55:38 crc kubenswrapper[4816]: I0216 13:55:38.068297 4816 
generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" exitCode=0 Feb 16 13:55:38 crc kubenswrapper[4816]: I0216 13:55:38.068343 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"} Feb 16 13:55:38 crc kubenswrapper[4816]: I0216 13:55:38.068373 4816 scope.go:117] "RemoveContainer" containerID="1b44e621f66d5c607087cc8c5f5e81b0f4e01c9a3aaa1ee8f561d1323e3679e6" Feb 16 13:55:38 crc kubenswrapper[4816]: I0216 13:55:38.068927 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:55:38 crc kubenswrapper[4816]: E0216 13:55:38.069187 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:55:49 crc kubenswrapper[4816]: I0216 13:55:49.398196 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:55:49 crc kubenswrapper[4816]: E0216 13:55:49.398893 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:56:00 crc kubenswrapper[4816]: I0216 13:56:00.399213 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:56:00 crc kubenswrapper[4816]: E0216 13:56:00.400037 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:56:14 crc kubenswrapper[4816]: I0216 13:56:14.425757 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:56:14 crc kubenswrapper[4816]: E0216 13:56:14.426968 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:56:26 crc kubenswrapper[4816]: I0216 13:56:26.399495 4816 scope.go:117] "RemoveContainer" 
containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:56:26 crc kubenswrapper[4816]: E0216 13:56:26.400080 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:56:38 crc kubenswrapper[4816]: I0216 13:56:38.399120 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:56:38 crc kubenswrapper[4816]: E0216 13:56:38.399962 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:56:52 crc kubenswrapper[4816]: I0216 13:56:52.399138 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:56:52 crc kubenswrapper[4816]: E0216 13:56:52.400002 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:57:05 crc kubenswrapper[4816]: I0216 13:57:05.399370 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:57:05 crc kubenswrapper[4816]: E0216 13:57:05.400073 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:57:20 crc kubenswrapper[4816]: I0216 13:57:20.399398 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:57:20 crc kubenswrapper[4816]: E0216 13:57:20.399895 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:57:34 crc kubenswrapper[4816]: I0216 13:57:34.398329 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:57:34 crc kubenswrapper[4816]: E0216 13:57:34.399135 4816 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:57:48 crc kubenswrapper[4816]: I0216 13:57:48.398776 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:57:48 crc kubenswrapper[4816]: E0216 13:57:48.399722 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:57:59 crc kubenswrapper[4816]: I0216 13:57:59.399208 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:57:59 crc kubenswrapper[4816]: E0216 13:57:59.399966 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:58:12 crc kubenswrapper[4816]: I0216 13:58:12.398678 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:58:12 crc kubenswrapper[4816]: E0216 13:58:12.399369 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:58:25 crc kubenswrapper[4816]: I0216 13:58:25.399336 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:58:25 crc kubenswrapper[4816]: E0216 13:58:25.400107 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 13:58:37 crc kubenswrapper[4816]: I0216 13:58:37.399472 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 13:58:37 crc kubenswrapper[4816]: E0216 13:58:37.400550 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Feb 16 13:58:48 crc kubenswrapper[4816]: I0216 13:58:48.398417 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"
Feb 16 13:58:48 crc kubenswrapper[4816]: E0216 13:58:48.399220 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:58:59 crc kubenswrapper[4816]: I0216 13:58:59.399259 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"
Feb 16 13:58:59 crc kubenswrapper[4816]: E0216 13:58:59.400116 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:59:11 crc kubenswrapper[4816]: I0216 13:59:11.404936 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"
Feb 16 13:59:11 crc kubenswrapper[4816]: E0216 13:59:11.406163 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:59:22 crc kubenswrapper[4816]: I0216 13:59:22.399714 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"
Feb 16 13:59:22 crc kubenswrapper[4816]: E0216 13:59:22.400898 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:59:37 crc kubenswrapper[4816]: I0216 13:59:37.398443 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"
Feb 16 13:59:37 crc kubenswrapper[4816]: E0216 13:59:37.399231 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 13:59:51 crc kubenswrapper[4816]: I0216 13:59:51.408448 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c"
Feb 16 13:59:51 crc kubenswrapper[4816]: E0216 13:59:51.409759 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.172156 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"]
Feb 16 14:00:00 crc kubenswrapper[4816]: E0216 14:00:00.173237 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="registry-server"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.173271 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="registry-server"
Feb 16 14:00:00 crc kubenswrapper[4816]: E0216 14:00:00.173315 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="extract-utilities"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.173324 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="extract-utilities"
Feb 16 14:00:00 crc kubenswrapper[4816]: E0216 14:00:00.173338 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="extract-content"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.173348 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="extract-content"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.173523 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="92bd01d5-9432-4b43-9ab2-27692882e5aa" containerName="registry-server"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.174142 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.176636 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.177397 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.180003 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"]
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.201770 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-secret-volume\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.201896 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snm5w\" (UniqueName: \"kubernetes.io/projected/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-kube-api-access-snm5w\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.201928 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-config-volume\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.303498 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snm5w\" (UniqueName: \"kubernetes.io/projected/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-kube-api-access-snm5w\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.303575 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-config-volume\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.303637 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-secret-volume\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.304696 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-config-volume\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"
\"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.310924 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-secret-volume\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.323418 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snm5w\" (UniqueName: \"kubernetes.io/projected/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-kube-api-access-snm5w\") pod \"collect-profiles-29520840-mqfvp\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.495527 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" Feb 16 14:00:00 crc kubenswrapper[4816]: I0216 14:00:00.967720 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"] Feb 16 14:00:01 crc kubenswrapper[4816]: I0216 14:00:01.417046 4816 generic.go:334] "Generic (PLEG): container finished" podID="84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" containerID="87e9679b1f6263fb3e21b272f03939b5b7d49f31e9febe687650d8061b82d225" exitCode=0 Feb 16 14:00:01 crc kubenswrapper[4816]: I0216 14:00:01.417138 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" event={"ID":"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15","Type":"ContainerDied","Data":"87e9679b1f6263fb3e21b272f03939b5b7d49f31e9febe687650d8061b82d225"} Feb 16 14:00:01 crc kubenswrapper[4816]: I0216 14:00:01.417451 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" event={"ID":"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15","Type":"ContainerStarted","Data":"e154f0212a5bcd16d8c7cd5a1ca439a4663cd34dd1bcad3976a2c3878f1212f4"} Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.708180 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.747398 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-secret-volume\") pod \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.747634 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snm5w\" (UniqueName: \"kubernetes.io/projected/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-kube-api-access-snm5w\") pod \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.747705 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-config-volume\") pod \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\" (UID: \"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15\") " Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.748275 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-config-volume" (OuterVolumeSpecName: "config-volume") pod "84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" (UID: "84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.752999 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" (UID: "84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.753270 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-kube-api-access-snm5w" (OuterVolumeSpecName: "kube-api-access-snm5w") pod "84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" (UID: "84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15"). InnerVolumeSpecName "kube-api-access-snm5w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.849384 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snm5w\" (UniqueName: \"kubernetes.io/projected/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-kube-api-access-snm5w\") on node \"crc\" DevicePath \"\"" Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.849427 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:00:02 crc kubenswrapper[4816]: I0216 14:00:02.849435 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:00:03 crc kubenswrapper[4816]: I0216 14:00:03.433463 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" event={"ID":"84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15","Type":"ContainerDied","Data":"e154f0212a5bcd16d8c7cd5a1ca439a4663cd34dd1bcad3976a2c3878f1212f4"} Feb 16 14:00:03 crc kubenswrapper[4816]: I0216 14:00:03.433988 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e154f0212a5bcd16d8c7cd5a1ca439a4663cd34dd1bcad3976a2c3878f1212f4" Feb 16 14:00:03 crc kubenswrapper[4816]: I0216 14:00:03.433518 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp" Feb 16 14:00:03 crc kubenswrapper[4816]: I0216 14:00:03.805324 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz"] Feb 16 14:00:03 crc kubenswrapper[4816]: I0216 14:00:03.812982 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520795-qb7gz"] Feb 16 14:00:05 crc kubenswrapper[4816]: I0216 14:00:05.399238 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 14:00:05 crc kubenswrapper[4816]: E0216 14:00:05.400397 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:00:05 crc kubenswrapper[4816]: I0216 14:00:05.411614 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1870327-3f1d-4a1a-b8ac-b463db7dc7ee" path="/var/lib/kubelet/pods/c1870327-3f1d-4a1a-b8ac-b463db7dc7ee/volumes" Feb 16 14:00:17 crc kubenswrapper[4816]: I0216 14:00:17.399415 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 14:00:17 crc kubenswrapper[4816]: E0216 14:00:17.404016 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:00:26 crc kubenswrapper[4816]: I0216 14:00:26.476370 4816 scope.go:117] "RemoveContainer" containerID="15bb0fd0ce57f2d60f2e13245b8c0efdbd3c1116e6082b1cfe0317dc9a3db27a" Feb 16 14:00:31 crc kubenswrapper[4816]: I0216 14:00:31.403673 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 14:00:31 crc kubenswrapper[4816]: E0216 14:00:31.404304 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.802144 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7wsp8"] Feb 16 14:00:34 crc kubenswrapper[4816]: E0216 14:00:34.804178 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" containerName="collect-profiles" Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.804277 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" containerName="collect-profiles" Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.804481 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" containerName="collect-profiles" Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.805642 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.819227 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7wsp8"] Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.984368 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-utilities\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.984799 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-catalog-content\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:34 crc kubenswrapper[4816]: I0216 14:00:34.984962 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwjgw\" (UniqueName: \"kubernetes.io/projected/f4961238-a988-4ee1-a41a-749181d06214-kube-api-access-lwjgw\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.086574 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-catalog-content\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.086697 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwjgw\" (UniqueName: \"kubernetes.io/projected/f4961238-a988-4ee1-a41a-749181d06214-kube-api-access-lwjgw\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.086777 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-utilities\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.087481 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-utilities\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.087681 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-catalog-content\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.115017 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lwjgw\" (UniqueName: \"kubernetes.io/projected/f4961238-a988-4ee1-a41a-749181d06214-kube-api-access-lwjgw\") pod \"redhat-operators-7wsp8\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.130280 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.585570 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7wsp8"] Feb 16 14:00:35 crc kubenswrapper[4816]: I0216 14:00:35.690913 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wsp8" event={"ID":"f4961238-a988-4ee1-a41a-749181d06214","Type":"ContainerStarted","Data":"cae57055798a9a322c231408dfed07b165c14d1c3f9f829c99d65ab335b4d73c"} Feb 16 14:00:36 crc kubenswrapper[4816]: I0216 14:00:36.699343 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4961238-a988-4ee1-a41a-749181d06214" containerID="1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a" exitCode=0 Feb 16 14:00:36 crc kubenswrapper[4816]: I0216 14:00:36.699409 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wsp8" event={"ID":"f4961238-a988-4ee1-a41a-749181d06214","Type":"ContainerDied","Data":"1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a"} Feb 16 14:00:36 crc kubenswrapper[4816]: I0216 14:00:36.701884 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:00:37 crc kubenswrapper[4816]: I0216 14:00:37.707959 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wsp8" event={"ID":"f4961238-a988-4ee1-a41a-749181d06214","Type":"ContainerStarted","Data":"c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc"} Feb 16 14:00:38 crc kubenswrapper[4816]: I0216 14:00:38.719272 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4961238-a988-4ee1-a41a-749181d06214" containerID="c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc" exitCode=0 Feb 16 14:00:38 crc kubenswrapper[4816]: I0216 14:00:38.719370 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wsp8" event={"ID":"f4961238-a988-4ee1-a41a-749181d06214","Type":"ContainerDied","Data":"c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc"} Feb 16 14:00:39 crc kubenswrapper[4816]: I0216 14:00:39.729083 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wsp8" event={"ID":"f4961238-a988-4ee1-a41a-749181d06214","Type":"ContainerStarted","Data":"7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4"} Feb 16 14:00:39 crc kubenswrapper[4816]: I0216 14:00:39.745357 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7wsp8" podStartSLOduration=3.302889638 podStartE2EDuration="5.745330593s" podCreationTimestamp="2026-02-16 14:00:34 +0000 UTC" firstStartedPulling="2026-02-16 14:00:36.701559267 +0000 UTC m=+3436.028273015" lastFinishedPulling="2026-02-16 14:00:39.144000242 +0000 UTC m=+3438.470713970" observedRunningTime="2026-02-16 14:00:39.743814041 +0000 UTC m=+3439.070527809" watchObservedRunningTime="2026-02-16 14:00:39.745330593 +0000 UTC m=+3439.072044361" Feb 16 14:00:42 crc 
Feb 16 14:00:42 crc kubenswrapper[4816]: I0216 14:00:42.752620 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"0649fa5808eccbffa2a2eae8aa2f155fd777d857add32d64e5eb8d7d9e815cb8"}
Feb 16 14:00:45 crc kubenswrapper[4816]: I0216 14:00:45.130846 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7wsp8"
Feb 16 14:00:45 crc kubenswrapper[4816]: I0216 14:00:45.131345 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7wsp8"
Feb 16 14:00:46 crc kubenswrapper[4816]: I0216 14:00:46.177203 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7wsp8" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="registry-server" probeResult="failure" output=<
Feb 16 14:00:46 crc kubenswrapper[4816]: 	timeout: failed to connect service ":50051" within 1s
Feb 16 14:00:46 crc kubenswrapper[4816]: >
Feb 16 14:00:49 crc kubenswrapper[4816]: I0216 14:00:49.973926 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8x96m"]
Feb 16 14:00:49 crc kubenswrapper[4816]: I0216 14:00:49.976532 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:49 crc kubenswrapper[4816]: I0216 14:00:49.989901 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8x96m"]
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.134386 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-catalog-content\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.134567 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-utilities\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.134613 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqzcb\" (UniqueName: \"kubernetes.io/projected/13702f48-2b62-4ab0-9170-a53ff65d569a-kube-api-access-fqzcb\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.235500 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-utilities\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.235551 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqzcb\" (UniqueName: \"kubernetes.io/projected/13702f48-2b62-4ab0-9170-a53ff65d569a-kube-api-access-fqzcb\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.235571 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-catalog-content\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.236030 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-catalog-content\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.236307 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-utilities\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.255616 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqzcb\" (UniqueName: \"kubernetes.io/projected/13702f48-2b62-4ab0-9170-a53ff65d569a-kube-api-access-fqzcb\") pod \"community-operators-8x96m\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.305219 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8x96m"
Feb 16 14:00:50 crc kubenswrapper[4816]: I0216 14:00:50.900467 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8x96m"]
Feb 16 14:00:51 crc kubenswrapper[4816]: I0216 14:00:51.807525 4816 generic.go:334] "Generic (PLEG): container finished" podID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerID="0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1" exitCode=0
Feb 16 14:00:51 crc kubenswrapper[4816]: I0216 14:00:51.807783 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8x96m" event={"ID":"13702f48-2b62-4ab0-9170-a53ff65d569a","Type":"ContainerDied","Data":"0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1"}
Feb 16 14:00:51 crc kubenswrapper[4816]: I0216 14:00:51.807864 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8x96m" event={"ID":"13702f48-2b62-4ab0-9170-a53ff65d569a","Type":"ContainerStarted","Data":"766dba5b2afd3673b97dfc1e3891753115ee43b985baef7f11da28b220b19458"}
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.582888 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2dwk6"]
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.588428 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.595460 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dwk6"]
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.771168 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-catalog-content\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.771755 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pthh\" (UniqueName: \"kubernetes.io/projected/00c4319c-beca-46de-bc82-4a36a51a2fa0-kube-api-access-6pthh\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.771920 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-utilities\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.815349 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8x96m" event={"ID":"13702f48-2b62-4ab0-9170-a53ff65d569a","Type":"ContainerStarted","Data":"1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed"}
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.873691 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-catalog-content\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.873928 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pthh\" (UniqueName: \"kubernetes.io/projected/00c4319c-beca-46de-bc82-4a36a51a2fa0-kube-api-access-6pthh\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.873985 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-utilities\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.874229 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-catalog-content\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.874467 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-utilities\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6"
\"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-utilities\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:00:52 crc kubenswrapper[4816]: I0216 14:00:52.906117 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pthh\" (UniqueName: \"kubernetes.io/projected/00c4319c-beca-46de-bc82-4a36a51a2fa0-kube-api-access-6pthh\") pod \"redhat-marketplace-2dwk6\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:00:53 crc kubenswrapper[4816]: I0216 14:00:53.206500 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:00:53 crc kubenswrapper[4816]: I0216 14:00:53.627019 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dwk6"] Feb 16 14:00:53 crc kubenswrapper[4816]: I0216 14:00:53.842012 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dwk6" event={"ID":"00c4319c-beca-46de-bc82-4a36a51a2fa0","Type":"ContainerStarted","Data":"f507cda20d9efddfde0f0c5d0c91b425c6e0ca9dee7aabd5733212cd13f3f3d8"} Feb 16 14:00:53 crc kubenswrapper[4816]: I0216 14:00:53.856384 4816 generic.go:334] "Generic (PLEG): container finished" podID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerID="1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed" exitCode=0 Feb 16 14:00:53 crc kubenswrapper[4816]: I0216 14:00:53.856607 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8x96m" event={"ID":"13702f48-2b62-4ab0-9170-a53ff65d569a","Type":"ContainerDied","Data":"1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed"} Feb 16 14:00:54 crc kubenswrapper[4816]: I0216 14:00:54.864009 4816 generic.go:334] "Generic (PLEG): container finished" podID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerID="8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11" exitCode=0 Feb 16 14:00:54 crc kubenswrapper[4816]: I0216 14:00:54.864082 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dwk6" event={"ID":"00c4319c-beca-46de-bc82-4a36a51a2fa0","Type":"ContainerDied","Data":"8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11"} Feb 16 14:00:54 crc kubenswrapper[4816]: I0216 14:00:54.866125 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8x96m" event={"ID":"13702f48-2b62-4ab0-9170-a53ff65d569a","Type":"ContainerStarted","Data":"4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e"} Feb 16 14:00:54 crc kubenswrapper[4816]: I0216 14:00:54.902879 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8x96m" podStartSLOduration=3.166469898 podStartE2EDuration="5.902858125s" podCreationTimestamp="2026-02-16 14:00:49 +0000 UTC" firstStartedPulling="2026-02-16 14:00:51.809382085 +0000 UTC m=+3451.136095813" lastFinishedPulling="2026-02-16 14:00:54.545770312 +0000 UTC m=+3453.872484040" observedRunningTime="2026-02-16 14:00:54.897826618 +0000 UTC m=+3454.224540356" watchObservedRunningTime="2026-02-16 14:00:54.902858125 +0000 UTC m=+3454.229571853" Feb 16 14:00:55 crc kubenswrapper[4816]: I0216 14:00:55.174264 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:55 crc kubenswrapper[4816]: I0216 14:00:55.219010 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:55 crc kubenswrapper[4816]: I0216 14:00:55.768069 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7wsp8"] Feb 16 14:00:56 crc kubenswrapper[4816]: I0216 14:00:56.884120 4816 generic.go:334] "Generic (PLEG): container finished" podID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerID="aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d" exitCode=0 Feb 16 14:00:56 crc kubenswrapper[4816]: I0216 14:00:56.884179 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dwk6" event={"ID":"00c4319c-beca-46de-bc82-4a36a51a2fa0","Type":"ContainerDied","Data":"aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d"} Feb 16 14:00:56 crc kubenswrapper[4816]: I0216 14:00:56.884377 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7wsp8" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="registry-server" containerID="cri-o://7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4" gracePeriod=2 Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.396059 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.484058 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwjgw\" (UniqueName: \"kubernetes.io/projected/f4961238-a988-4ee1-a41a-749181d06214-kube-api-access-lwjgw\") pod \"f4961238-a988-4ee1-a41a-749181d06214\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.484119 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-catalog-content\") pod \"f4961238-a988-4ee1-a41a-749181d06214\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.484229 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-utilities\") pod \"f4961238-a988-4ee1-a41a-749181d06214\" (UID: \"f4961238-a988-4ee1-a41a-749181d06214\") " Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.485311 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-utilities" (OuterVolumeSpecName: "utilities") pod "f4961238-a988-4ee1-a41a-749181d06214" (UID: "f4961238-a988-4ee1-a41a-749181d06214"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.498417 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4961238-a988-4ee1-a41a-749181d06214-kube-api-access-lwjgw" (OuterVolumeSpecName: "kube-api-access-lwjgw") pod "f4961238-a988-4ee1-a41a-749181d06214" (UID: "f4961238-a988-4ee1-a41a-749181d06214"). InnerVolumeSpecName "kube-api-access-lwjgw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.585765 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.585804 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwjgw\" (UniqueName: \"kubernetes.io/projected/f4961238-a988-4ee1-a41a-749181d06214-kube-api-access-lwjgw\") on node \"crc\" DevicePath \"\"" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.633701 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4961238-a988-4ee1-a41a-749181d06214" (UID: "f4961238-a988-4ee1-a41a-749181d06214"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.686796 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4961238-a988-4ee1-a41a-749181d06214-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.893359 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4961238-a988-4ee1-a41a-749181d06214" containerID="7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4" exitCode=0 Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.893432 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wsp8" event={"ID":"f4961238-a988-4ee1-a41a-749181d06214","Type":"ContainerDied","Data":"7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4"} Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.893436 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7wsp8" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.893481 4816 scope.go:117] "RemoveContainer" containerID="7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.893470 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wsp8" event={"ID":"f4961238-a988-4ee1-a41a-749181d06214","Type":"ContainerDied","Data":"cae57055798a9a322c231408dfed07b165c14d1c3f9f829c99d65ab335b4d73c"} Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.895598 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dwk6" event={"ID":"00c4319c-beca-46de-bc82-4a36a51a2fa0","Type":"ContainerStarted","Data":"2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710"} Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.909467 4816 scope.go:117] "RemoveContainer" containerID="c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.938833 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2dwk6" podStartSLOduration=3.515607617 podStartE2EDuration="5.938812327s" podCreationTimestamp="2026-02-16 14:00:52 +0000 UTC" firstStartedPulling="2026-02-16 14:00:54.865510927 +0000 UTC m=+3454.192224655" lastFinishedPulling="2026-02-16 14:00:57.288715637 +0000 UTC m=+3456.615429365" observedRunningTime="2026-02-16 14:00:57.925609337 +0000 UTC m=+3457.252323065" watchObservedRunningTime="2026-02-16 14:00:57.938812327 +0000 UTC m=+3457.265526065" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.946468 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7wsp8"] Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.946780 4816 scope.go:117] "RemoveContainer" containerID="1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.956173 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7wsp8"] Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.966263 4816 scope.go:117] "RemoveContainer" containerID="7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4" Feb 16 14:00:57 crc kubenswrapper[4816]: E0216 14:00:57.966890 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4\": container with ID starting with 7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4 not found: ID does not exist" containerID="7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.967031 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4"} err="failed to get container status \"7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4\": rpc error: code = NotFound desc = could not find container \"7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4\": container with ID starting with 7ef9db0e893785a013e31da1ee689d78fb7ca4317de2d1ceccbe5833398df9e4 not found: ID does not exist" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.967137 4816 scope.go:117] 
"RemoveContainer" containerID="c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc" Feb 16 14:00:57 crc kubenswrapper[4816]: E0216 14:00:57.968296 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc\": container with ID starting with c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc not found: ID does not exist" containerID="c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.968327 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc"} err="failed to get container status \"c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc\": rpc error: code = NotFound desc = could not find container \"c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc\": container with ID starting with c3a458d251b7460f3951650ceafbcfd6b9f72814cfde584edc6835e17297edcc not found: ID does not exist" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.968347 4816 scope.go:117] "RemoveContainer" containerID="1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a" Feb 16 14:00:57 crc kubenswrapper[4816]: E0216 14:00:57.968726 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a\": container with ID starting with 1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a not found: ID does not exist" containerID="1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a" Feb 16 14:00:57 crc kubenswrapper[4816]: I0216 14:00:57.968873 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a"} err="failed to get container status \"1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a\": rpc error: code = NotFound desc = could not find container \"1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a\": container with ID starting with 1fb49aa9290039b20c9b09a7a5bf10b3bf20c22d98328d2359373bbfc9f9084a not found: ID does not exist" Feb 16 14:00:59 crc kubenswrapper[4816]: I0216 14:00:59.408848 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4961238-a988-4ee1-a41a-749181d06214" path="/var/lib/kubelet/pods/f4961238-a988-4ee1-a41a-749181d06214/volumes" Feb 16 14:01:00 crc kubenswrapper[4816]: I0216 14:01:00.306927 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8x96m" Feb 16 14:01:00 crc kubenswrapper[4816]: I0216 14:01:00.307183 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8x96m" Feb 16 14:01:00 crc kubenswrapper[4816]: I0216 14:01:00.364812 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8x96m" Feb 16 14:01:00 crc kubenswrapper[4816]: I0216 14:01:00.994377 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8x96m" Feb 16 14:01:02 crc kubenswrapper[4816]: I0216 14:01:02.166484 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-8x96m"] Feb 16 14:01:03 crc kubenswrapper[4816]: I0216 14:01:03.206708 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:01:03 crc kubenswrapper[4816]: I0216 14:01:03.207048 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:01:03 crc kubenswrapper[4816]: I0216 14:01:03.249113 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:01:03 crc kubenswrapper[4816]: I0216 14:01:03.942143 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8x96m" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="registry-server" containerID="cri-o://4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e" gracePeriod=2 Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.089810 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.461301 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8x96m" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.491541 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-utilities\") pod \"13702f48-2b62-4ab0-9170-a53ff65d569a\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.492437 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-catalog-content\") pod \"13702f48-2b62-4ab0-9170-a53ff65d569a\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.492617 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqzcb\" (UniqueName: \"kubernetes.io/projected/13702f48-2b62-4ab0-9170-a53ff65d569a-kube-api-access-fqzcb\") pod \"13702f48-2b62-4ab0-9170-a53ff65d569a\" (UID: \"13702f48-2b62-4ab0-9170-a53ff65d569a\") " Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.494912 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-utilities" (OuterVolumeSpecName: "utilities") pod "13702f48-2b62-4ab0-9170-a53ff65d569a" (UID: "13702f48-2b62-4ab0-9170-a53ff65d569a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.498058 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13702f48-2b62-4ab0-9170-a53ff65d569a-kube-api-access-fqzcb" (OuterVolumeSpecName: "kube-api-access-fqzcb") pod "13702f48-2b62-4ab0-9170-a53ff65d569a" (UID: "13702f48-2b62-4ab0-9170-a53ff65d569a"). InnerVolumeSpecName "kube-api-access-fqzcb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.550814 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "13702f48-2b62-4ab0-9170-a53ff65d569a" (UID: "13702f48-2b62-4ab0-9170-a53ff65d569a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.595751 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.595812 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13702f48-2b62-4ab0-9170-a53ff65d569a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.595852 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqzcb\" (UniqueName: \"kubernetes.io/projected/13702f48-2b62-4ab0-9170-a53ff65d569a-kube-api-access-fqzcb\") on node \"crc\" DevicePath \"\"" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.951817 4816 generic.go:334] "Generic (PLEG): container finished" podID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerID="4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e" exitCode=0 Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.951890 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8x96m" event={"ID":"13702f48-2b62-4ab0-9170-a53ff65d569a","Type":"ContainerDied","Data":"4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e"} Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.951963 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8x96m" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.951985 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8x96m" event={"ID":"13702f48-2b62-4ab0-9170-a53ff65d569a","Type":"ContainerDied","Data":"766dba5b2afd3673b97dfc1e3891753115ee43b985baef7f11da28b220b19458"} Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.952030 4816 scope.go:117] "RemoveContainer" containerID="4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.974228 4816 scope.go:117] "RemoveContainer" containerID="1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed" Feb 16 14:01:04 crc kubenswrapper[4816]: I0216 14:01:04.984986 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8x96m"] Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.000372 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8x96m"] Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.006092 4816 scope.go:117] "RemoveContainer" containerID="0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.028250 4816 scope.go:117] "RemoveContainer" containerID="4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e" Feb 16 14:01:05 crc kubenswrapper[4816]: E0216 14:01:05.028646 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e\": container with ID starting with 4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e not found: ID does not exist" containerID="4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.028700 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e"} err="failed to get container status \"4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e\": rpc error: code = NotFound desc = could not find container \"4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e\": container with ID starting with 4587386b890efb72a5d909ff0ab58d23c0a3dc1ec719d0bd25a9eb89145d932e not found: ID does not exist" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.028756 4816 scope.go:117] "RemoveContainer" containerID="1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed" Feb 16 14:01:05 crc kubenswrapper[4816]: E0216 14:01:05.028964 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed\": container with ID starting with 1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed not found: ID does not exist" containerID="1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.028990 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed"} err="failed to get container status \"1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed\": rpc error: code = NotFound desc = could not find 
container \"1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed\": container with ID starting with 1e45d990dffaa94a0ec1fccdc9a0c09df33cc641ddb0c4f74bbff0431db619ed not found: ID does not exist" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.029008 4816 scope.go:117] "RemoveContainer" containerID="0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1" Feb 16 14:01:05 crc kubenswrapper[4816]: E0216 14:01:05.029458 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1\": container with ID starting with 0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1 not found: ID does not exist" containerID="0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.029505 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1"} err="failed to get container status \"0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1\": rpc error: code = NotFound desc = could not find container \"0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1\": container with ID starting with 0a917cb72327d0ee01da5db2102247c67f72c5610cc7c3fc24d5cb678fcf98f1 not found: ID does not exist" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.420405 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" path="/var/lib/kubelet/pods/13702f48-2b62-4ab0-9170-a53ff65d569a/volumes" Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.563883 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dwk6"] Feb 16 14:01:05 crc kubenswrapper[4816]: I0216 14:01:05.960990 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2dwk6" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="registry-server" containerID="cri-o://2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710" gracePeriod=2 Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.410401 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.419422 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-utilities\") pod \"00c4319c-beca-46de-bc82-4a36a51a2fa0\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.419587 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-catalog-content\") pod \"00c4319c-beca-46de-bc82-4a36a51a2fa0\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.419616 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pthh\" (UniqueName: \"kubernetes.io/projected/00c4319c-beca-46de-bc82-4a36a51a2fa0-kube-api-access-6pthh\") pod \"00c4319c-beca-46de-bc82-4a36a51a2fa0\" (UID: \"00c4319c-beca-46de-bc82-4a36a51a2fa0\") " Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.421393 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-utilities" (OuterVolumeSpecName: "utilities") pod "00c4319c-beca-46de-bc82-4a36a51a2fa0" (UID: "00c4319c-beca-46de-bc82-4a36a51a2fa0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.429932 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00c4319c-beca-46de-bc82-4a36a51a2fa0-kube-api-access-6pthh" (OuterVolumeSpecName: "kube-api-access-6pthh") pod "00c4319c-beca-46de-bc82-4a36a51a2fa0" (UID: "00c4319c-beca-46de-bc82-4a36a51a2fa0"). InnerVolumeSpecName "kube-api-access-6pthh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.461095 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "00c4319c-beca-46de-bc82-4a36a51a2fa0" (UID: "00c4319c-beca-46de-bc82-4a36a51a2fa0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.521221 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.521648 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00c4319c-beca-46de-bc82-4a36a51a2fa0-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.521848 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pthh\" (UniqueName: \"kubernetes.io/projected/00c4319c-beca-46de-bc82-4a36a51a2fa0-kube-api-access-6pthh\") on node \"crc\" DevicePath \"\"" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.970139 4816 generic.go:334] "Generic (PLEG): container finished" podID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerID="2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710" exitCode=0 Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.970208 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dwk6" event={"ID":"00c4319c-beca-46de-bc82-4a36a51a2fa0","Type":"ContainerDied","Data":"2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710"} Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.970242 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dwk6" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.970793 4816 scope.go:117] "RemoveContainer" containerID="2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710" Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.970771 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dwk6" event={"ID":"00c4319c-beca-46de-bc82-4a36a51a2fa0","Type":"ContainerDied","Data":"f507cda20d9efddfde0f0c5d0c91b425c6e0ca9dee7aabd5733212cd13f3f3d8"} Feb 16 14:01:06 crc kubenswrapper[4816]: I0216 14:01:06.989156 4816 scope.go:117] "RemoveContainer" containerID="aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.014413 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dwk6"] Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.024143 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dwk6"] Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.024318 4816 scope.go:117] "RemoveContainer" containerID="8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.043781 4816 scope.go:117] "RemoveContainer" containerID="2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710" Feb 16 14:01:07 crc kubenswrapper[4816]: E0216 14:01:07.044282 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710\": container with ID starting with 2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710 not found: ID does not exist" containerID="2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.044324 4816 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710"} err="failed to get container status \"2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710\": rpc error: code = NotFound desc = could not find container \"2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710\": container with ID starting with 2efbe43a4aa19e3128d9b959548aba388a02682cc8eab14c6612cc8942d2b710 not found: ID does not exist" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.044349 4816 scope.go:117] "RemoveContainer" containerID="aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d" Feb 16 14:01:07 crc kubenswrapper[4816]: E0216 14:01:07.044846 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d\": container with ID starting with aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d not found: ID does not exist" containerID="aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.044910 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d"} err="failed to get container status \"aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d\": rpc error: code = NotFound desc = could not find container \"aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d\": container with ID starting with aac78fe3ea2772d7eea448817f1d63c233f93ca4ff705ae337897cecddc7834d not found: ID does not exist" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.044949 4816 scope.go:117] "RemoveContainer" containerID="8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11" Feb 16 14:01:07 crc kubenswrapper[4816]: E0216 14:01:07.045243 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11\": container with ID starting with 8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11 not found: ID does not exist" containerID="8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.045264 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11"} err="failed to get container status \"8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11\": rpc error: code = NotFound desc = could not find container \"8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11\": container with ID starting with 8fca5335f89239b7ae58b2be29b11e2432969be454384a1c6498491222940e11 not found: ID does not exist" Feb 16 14:01:07 crc kubenswrapper[4816]: I0216 14:01:07.413892 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" path="/var/lib/kubelet/pods/00c4319c-beca-46de-bc82-4a36a51a2fa0/volumes" Feb 16 14:02:14 crc kubenswrapper[4816]: I0216 14:02:14.319116 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd" podUID="ee88d6f1-148e-4a18-ae88-4bdda1df4d65" containerName="manager" probeResult="failure" 
output="Get \"http://10.217.0.83:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 14:03:06 crc kubenswrapper[4816]: I0216 14:03:06.940696 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:03:06 crc kubenswrapper[4816]: I0216 14:03:06.941263 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:03:36 crc kubenswrapper[4816]: I0216 14:03:36.940374 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:03:36 crc kubenswrapper[4816]: I0216 14:03:36.941007 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:04:06 crc kubenswrapper[4816]: I0216 14:04:06.940984 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:04:06 crc kubenswrapper[4816]: I0216 14:04:06.941516 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:04:06 crc kubenswrapper[4816]: I0216 14:04:06.941568 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:04:06 crc kubenswrapper[4816]: I0216 14:04:06.942361 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0649fa5808eccbffa2a2eae8aa2f155fd777d857add32d64e5eb8d7d9e815cb8"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:04:06 crc kubenswrapper[4816]: I0216 14:04:06.942435 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://0649fa5808eccbffa2a2eae8aa2f155fd777d857add32d64e5eb8d7d9e815cb8" gracePeriod=600 Feb 16 14:04:07 crc kubenswrapper[4816]: I0216 14:04:07.602806 4816 generic.go:334] "Generic (PLEG): container finished" 
podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="0649fa5808eccbffa2a2eae8aa2f155fd777d857add32d64e5eb8d7d9e815cb8" exitCode=0 Feb 16 14:04:07 crc kubenswrapper[4816]: I0216 14:04:07.603209 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"0649fa5808eccbffa2a2eae8aa2f155fd777d857add32d64e5eb8d7d9e815cb8"} Feb 16 14:04:07 crc kubenswrapper[4816]: I0216 14:04:07.603236 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"} Feb 16 14:04:07 crc kubenswrapper[4816]: I0216 14:04:07.603251 4816 scope.go:117] "RemoveContainer" containerID="ca7b3eac50592c09c23c1ae4da3e7ce76e53a64b978e0e503d4e5d078ffcd15c" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.337136 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fvz48"] Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338102 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="extract-utilities" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338120 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="extract-utilities" Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338150 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="extract-content" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338158 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="extract-content" Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338169 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338177 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338188 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="extract-utilities" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338194 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="extract-utilities" Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338203 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338209 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338220 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="extract-content" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338226 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="extract-content" Feb 16 
14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338236 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338242 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338256 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="extract-utilities" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338265 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="extract-utilities" Feb 16 14:04:20 crc kubenswrapper[4816]: E0216 14:04:20.338291 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="extract-content" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338298 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="extract-content" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338439 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="13702f48-2b62-4ab0-9170-a53ff65d569a" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338453 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="00c4319c-beca-46de-bc82-4a36a51a2fa0" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.338470 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4961238-a988-4ee1-a41a-749181d06214" containerName="registry-server" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.339558 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.360693 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvz48"] Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.437614 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-utilities\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.437700 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-catalog-content\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.437824 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd7jl\" (UniqueName: \"kubernetes.io/projected/f4517a80-407e-4c73-9e2a-4c25ab0f495d-kube-api-access-rd7jl\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.539696 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd7jl\" (UniqueName: \"kubernetes.io/projected/f4517a80-407e-4c73-9e2a-4c25ab0f495d-kube-api-access-rd7jl\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.539766 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-utilities\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.539784 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-catalog-content\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.540380 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-utilities\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.540628 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-catalog-content\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.570679 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rd7jl\" (UniqueName: \"kubernetes.io/projected/f4517a80-407e-4c73-9e2a-4c25ab0f495d-kube-api-access-rd7jl\") pod \"certified-operators-fvz48\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:20 crc kubenswrapper[4816]: I0216 14:04:20.658730 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:21 crc kubenswrapper[4816]: I0216 14:04:21.127391 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fvz48"] Feb 16 14:04:21 crc kubenswrapper[4816]: I0216 14:04:21.727515 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerID="da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1" exitCode=0 Feb 16 14:04:21 crc kubenswrapper[4816]: I0216 14:04:21.727584 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvz48" event={"ID":"f4517a80-407e-4c73-9e2a-4c25ab0f495d","Type":"ContainerDied","Data":"da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1"} Feb 16 14:04:21 crc kubenswrapper[4816]: I0216 14:04:21.727636 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvz48" event={"ID":"f4517a80-407e-4c73-9e2a-4c25ab0f495d","Type":"ContainerStarted","Data":"5aecceb0a43682f85e81936bfe9193d9663b2e02d453602d1b05cabe24e6ed70"} Feb 16 14:04:22 crc kubenswrapper[4816]: I0216 14:04:22.736800 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvz48" event={"ID":"f4517a80-407e-4c73-9e2a-4c25ab0f495d","Type":"ContainerStarted","Data":"a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f"} Feb 16 14:04:23 crc kubenswrapper[4816]: I0216 14:04:23.748019 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerID="a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f" exitCode=0 Feb 16 14:04:23 crc kubenswrapper[4816]: I0216 14:04:23.748136 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvz48" event={"ID":"f4517a80-407e-4c73-9e2a-4c25ab0f495d","Type":"ContainerDied","Data":"a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f"} Feb 16 14:04:24 crc kubenswrapper[4816]: I0216 14:04:24.756534 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvz48" event={"ID":"f4517a80-407e-4c73-9e2a-4c25ab0f495d","Type":"ContainerStarted","Data":"18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899"} Feb 16 14:04:24 crc kubenswrapper[4816]: I0216 14:04:24.779123 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fvz48" podStartSLOduration=2.357013292 podStartE2EDuration="4.779102946s" podCreationTimestamp="2026-02-16 14:04:20 +0000 UTC" firstStartedPulling="2026-02-16 14:04:21.729630655 +0000 UTC m=+3661.056344383" lastFinishedPulling="2026-02-16 14:04:24.151720309 +0000 UTC m=+3663.478434037" observedRunningTime="2026-02-16 14:04:24.773353189 +0000 UTC m=+3664.100066937" watchObservedRunningTime="2026-02-16 14:04:24.779102946 +0000 UTC m=+3664.105816664" Feb 16 14:04:30 crc kubenswrapper[4816]: I0216 14:04:30.660034 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:30 crc kubenswrapper[4816]: I0216 14:04:30.660917 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:30 crc kubenswrapper[4816]: I0216 14:04:30.728725 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:30 crc kubenswrapper[4816]: I0216 14:04:30.862337 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:30 crc kubenswrapper[4816]: I0216 14:04:30.972401 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fvz48"] Feb 16 14:04:32 crc kubenswrapper[4816]: I0216 14:04:32.819605 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fvz48" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="registry-server" containerID="cri-o://18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899" gracePeriod=2 Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.290216 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.449077 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd7jl\" (UniqueName: \"kubernetes.io/projected/f4517a80-407e-4c73-9e2a-4c25ab0f495d-kube-api-access-rd7jl\") pod \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.449379 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-utilities\") pod \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.449419 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-catalog-content\") pod \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\" (UID: \"f4517a80-407e-4c73-9e2a-4c25ab0f495d\") " Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.450293 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-utilities" (OuterVolumeSpecName: "utilities") pod "f4517a80-407e-4c73-9e2a-4c25ab0f495d" (UID: "f4517a80-407e-4c73-9e2a-4c25ab0f495d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.454672 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4517a80-407e-4c73-9e2a-4c25ab0f495d-kube-api-access-rd7jl" (OuterVolumeSpecName: "kube-api-access-rd7jl") pod "f4517a80-407e-4c73-9e2a-4c25ab0f495d" (UID: "f4517a80-407e-4c73-9e2a-4c25ab0f495d"). InnerVolumeSpecName "kube-api-access-rd7jl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.511319 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4517a80-407e-4c73-9e2a-4c25ab0f495d" (UID: "f4517a80-407e-4c73-9e2a-4c25ab0f495d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.551492 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.551682 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4517a80-407e-4c73-9e2a-4c25ab0f495d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.551707 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd7jl\" (UniqueName: \"kubernetes.io/projected/f4517a80-407e-4c73-9e2a-4c25ab0f495d-kube-api-access-rd7jl\") on node \"crc\" DevicePath \"\"" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.825913 4816 generic.go:334] "Generic (PLEG): container finished" podID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerID="18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899" exitCode=0 Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.825957 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvz48" event={"ID":"f4517a80-407e-4c73-9e2a-4c25ab0f495d","Type":"ContainerDied","Data":"18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899"} Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.825989 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fvz48" event={"ID":"f4517a80-407e-4c73-9e2a-4c25ab0f495d","Type":"ContainerDied","Data":"5aecceb0a43682f85e81936bfe9193d9663b2e02d453602d1b05cabe24e6ed70"} Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.826006 4816 scope.go:117] "RemoveContainer" containerID="18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.826026 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fvz48" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.844734 4816 scope.go:117] "RemoveContainer" containerID="a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.857134 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fvz48"] Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.861905 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fvz48"] Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.884782 4816 scope.go:117] "RemoveContainer" containerID="da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.900517 4816 scope.go:117] "RemoveContainer" containerID="18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899" Feb 16 14:04:33 crc kubenswrapper[4816]: E0216 14:04:33.901078 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899\": container with ID starting with 18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899 not found: ID does not exist" containerID="18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.901115 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899"} err="failed to get container status \"18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899\": rpc error: code = NotFound desc = could not find container \"18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899\": container with ID starting with 18be5641fcae74cfe7f7eca20970ca3e199d94ac7df7ace3672c655ad089b899 not found: ID does not exist" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.901141 4816 scope.go:117] "RemoveContainer" containerID="a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f" Feb 16 14:04:33 crc kubenswrapper[4816]: E0216 14:04:33.901635 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f\": container with ID starting with a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f not found: ID does not exist" containerID="a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.901726 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f"} err="failed to get container status \"a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f\": rpc error: code = NotFound desc = could not find container \"a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f\": container with ID starting with a64c391bbe96fdf72dce20b93cde0c8c4a76b88d4698a6008d099459cc00269f not found: ID does not exist" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.901754 4816 scope.go:117] "RemoveContainer" containerID="da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1" Feb 16 14:04:33 crc kubenswrapper[4816]: E0216 14:04:33.902075 4816 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1\": container with ID starting with da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1 not found: ID does not exist" containerID="da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1" Feb 16 14:04:33 crc kubenswrapper[4816]: I0216 14:04:33.902099 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1"} err="failed to get container status \"da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1\": rpc error: code = NotFound desc = could not find container \"da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1\": container with ID starting with da20d9d0fdf2d8f9e0b5d4541348f9fe61e9b7c7c2618e0866b1fc3ddfae43f1 not found: ID does not exist" Feb 16 14:04:35 crc kubenswrapper[4816]: I0216 14:04:35.409336 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" path="/var/lib/kubelet/pods/f4517a80-407e-4c73-9e2a-4c25ab0f495d/volumes" Feb 16 14:06:36 crc kubenswrapper[4816]: I0216 14:06:36.940792 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:06:36 crc kubenswrapper[4816]: I0216 14:06:36.941349 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:07:06 crc kubenswrapper[4816]: I0216 14:07:06.940879 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:07:06 crc kubenswrapper[4816]: I0216 14:07:06.941575 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:07:36 crc kubenswrapper[4816]: I0216 14:07:36.940986 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:07:36 crc kubenswrapper[4816]: I0216 14:07:36.941590 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:07:36 crc kubenswrapper[4816]: I0216 14:07:36.941635 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:07:36 crc kubenswrapper[4816]: I0216 14:07:36.942408 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:07:36 crc kubenswrapper[4816]: I0216 14:07:36.942466 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" gracePeriod=600 Feb 16 14:07:37 crc kubenswrapper[4816]: E0216 14:07:37.067480 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:07:37 crc kubenswrapper[4816]: I0216 14:07:37.271981 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" exitCode=0 Feb 16 14:07:37 crc kubenswrapper[4816]: I0216 14:07:37.272060 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"} Feb 16 14:07:37 crc kubenswrapper[4816]: I0216 14:07:37.272151 4816 scope.go:117] "RemoveContainer" containerID="0649fa5808eccbffa2a2eae8aa2f155fd777d857add32d64e5eb8d7d9e815cb8" Feb 16 14:07:37 crc kubenswrapper[4816]: I0216 14:07:37.272541 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:07:37 crc kubenswrapper[4816]: E0216 14:07:37.272820 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:07:48 crc kubenswrapper[4816]: I0216 14:07:48.398859 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:07:48 crc kubenswrapper[4816]: E0216 14:07:48.399686 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:08:00 crc 
kubenswrapper[4816]: I0216 14:08:00.399331 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:08:00 crc kubenswrapper[4816]: E0216 14:08:00.399992 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:08:13 crc kubenswrapper[4816]: I0216 14:08:13.398640 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:08:13 crc kubenswrapper[4816]: E0216 14:08:13.399903 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:08:25 crc kubenswrapper[4816]: I0216 14:08:25.399630 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:08:25 crc kubenswrapper[4816]: E0216 14:08:25.400688 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:08:40 crc kubenswrapper[4816]: I0216 14:08:40.398567 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:08:40 crc kubenswrapper[4816]: E0216 14:08:40.399331 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:08:52 crc kubenswrapper[4816]: I0216 14:08:52.399169 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:08:52 crc kubenswrapper[4816]: E0216 14:08:52.400042 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:09:06 crc kubenswrapper[4816]: I0216 14:09:06.401196 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:09:06 crc 
Feb 16 14:09:06 crc kubenswrapper[4816]: E0216 14:09:06.401872 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:09:21 crc kubenswrapper[4816]: I0216 14:09:21.404040 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:09:21 crc kubenswrapper[4816]: E0216 14:09:21.405097 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:09:36 crc kubenswrapper[4816]: I0216 14:09:36.399252 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:09:36 crc kubenswrapper[4816]: E0216 14:09:36.400090 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:09:48 crc kubenswrapper[4816]: I0216 14:09:48.398853 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:09:48 crc kubenswrapper[4816]: E0216 14:09:48.399568 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:10:00 crc kubenswrapper[4816]: I0216 14:10:00.398888 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:10:00 crc kubenswrapper[4816]: E0216 14:10:00.399685 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:10:11 crc kubenswrapper[4816]: I0216 14:10:11.408583 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:10:11 crc kubenswrapper[4816]: E0216 14:10:11.411175 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:10:26 crc kubenswrapper[4816]: I0216 14:10:26.398942 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:10:26 crc kubenswrapper[4816]: E0216 14:10:26.400115 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:10:40 crc kubenswrapper[4816]: I0216 14:10:40.398320 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:10:40 crc kubenswrapper[4816]: E0216 14:10:40.398971 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:10:54 crc kubenswrapper[4816]: I0216 14:10:54.398983 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:10:54 crc kubenswrapper[4816]: E0216 14:10:54.399703 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:11:09 crc kubenswrapper[4816]: I0216 14:11:09.399195 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:11:09 crc kubenswrapper[4816]: E0216 14:11:09.399947 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:11:20 crc kubenswrapper[4816]: I0216 14:11:20.398777 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:11:33 crc kubenswrapper[4816]: I0216 14:11:33.399233 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:11:33 crc kubenswrapper[4816]: E0216 14:11:33.399975 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:11:46 crc kubenswrapper[4816]: I0216 14:11:46.398866 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:11:46 crc kubenswrapper[4816]: E0216 14:11:46.399593 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.218067 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fgwnr"] Feb 16 14:11:49 crc kubenswrapper[4816]: E0216 14:11:49.221440 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="extract-content" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.221479 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="extract-content" Feb 16 14:11:49 crc kubenswrapper[4816]: E0216 14:11:49.221521 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="extract-utilities" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.221533 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="extract-utilities" Feb 16 14:11:49 crc kubenswrapper[4816]: E0216 14:11:49.221563 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="registry-server" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.221578 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="registry-server" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.221897 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4517a80-407e-4c73-9e2a-4c25ab0f495d" containerName="registry-server" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.223758 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.235044 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fgwnr"] Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.313600 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-catalog-content\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.313996 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw28n\" (UniqueName: \"kubernetes.io/projected/7634bb29-8fe4-4ece-83c5-dd406bdb626e-kube-api-access-zw28n\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.314077 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-utilities\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.415047 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-utilities\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.415121 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-catalog-content\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.415152 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw28n\" (UniqueName: \"kubernetes.io/projected/7634bb29-8fe4-4ece-83c5-dd406bdb626e-kube-api-access-zw28n\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.415773 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-catalog-content\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.415889 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-utilities\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.436971 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zw28n\" (UniqueName: \"kubernetes.io/projected/7634bb29-8fe4-4ece-83c5-dd406bdb626e-kube-api-access-zw28n\") pod \"community-operators-fgwnr\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") " pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.541444 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:49 crc kubenswrapper[4816]: I0216 14:11:49.821790 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fgwnr"] Feb 16 14:11:50 crc kubenswrapper[4816]: I0216 14:11:50.558473 4816 generic.go:334] "Generic (PLEG): container finished" podID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerID="814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb" exitCode=0 Feb 16 14:11:50 crc kubenswrapper[4816]: I0216 14:11:50.558526 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgwnr" event={"ID":"7634bb29-8fe4-4ece-83c5-dd406bdb626e","Type":"ContainerDied","Data":"814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb"} Feb 16 14:11:50 crc kubenswrapper[4816]: I0216 14:11:50.558561 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgwnr" event={"ID":"7634bb29-8fe4-4ece-83c5-dd406bdb626e","Type":"ContainerStarted","Data":"08c73fdc033523773fc2a9e332da663de6d70fa366541197de4e38df04108af5"} Feb 16 14:11:50 crc kubenswrapper[4816]: I0216 14:11:50.561568 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:11:51 crc kubenswrapper[4816]: I0216 14:11:51.575526 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgwnr" event={"ID":"7634bb29-8fe4-4ece-83c5-dd406bdb626e","Type":"ContainerStarted","Data":"0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750"} Feb 16 14:11:52 crc kubenswrapper[4816]: I0216 14:11:52.583802 4816 generic.go:334] "Generic (PLEG): container finished" podID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerID="0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750" exitCode=0 Feb 16 14:11:52 crc kubenswrapper[4816]: I0216 14:11:52.583876 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgwnr" event={"ID":"7634bb29-8fe4-4ece-83c5-dd406bdb626e","Type":"ContainerDied","Data":"0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750"} Feb 16 14:11:53 crc kubenswrapper[4816]: I0216 14:11:53.598933 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgwnr" event={"ID":"7634bb29-8fe4-4ece-83c5-dd406bdb626e","Type":"ContainerStarted","Data":"795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1"} Feb 16 14:11:53 crc kubenswrapper[4816]: I0216 14:11:53.620041 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fgwnr" podStartSLOduration=2.215018485 podStartE2EDuration="4.620023434s" podCreationTimestamp="2026-02-16 14:11:49 +0000 UTC" firstStartedPulling="2026-02-16 14:11:50.561300015 +0000 UTC m=+4109.888013743" lastFinishedPulling="2026-02-16 14:11:52.966304974 +0000 UTC m=+4112.293018692" observedRunningTime="2026-02-16 14:11:53.61694481 +0000 UTC m=+4112.943658548" watchObservedRunningTime="2026-02-16 
14:11:53.620023434 +0000 UTC m=+4112.946737172" Feb 16 14:11:57 crc kubenswrapper[4816]: I0216 14:11:57.399190 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:11:57 crc kubenswrapper[4816]: E0216 14:11:57.400066 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:11:59 crc kubenswrapper[4816]: I0216 14:11:59.542739 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:59 crc kubenswrapper[4816]: I0216 14:11:59.543224 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:59 crc kubenswrapper[4816]: I0216 14:11:59.610969 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:59 crc kubenswrapper[4816]: I0216 14:11:59.691841 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fgwnr" Feb 16 14:11:59 crc kubenswrapper[4816]: I0216 14:11:59.860421 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fgwnr"] Feb 16 14:12:01 crc kubenswrapper[4816]: I0216 14:12:01.666662 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fgwnr" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="registry-server" containerID="cri-o://795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1" gracePeriod=2 Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.034907 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.034907 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fgwnr"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.207517 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-utilities\") pod \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") "
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.207623 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-catalog-content\") pod \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") "
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.207656 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw28n\" (UniqueName: \"kubernetes.io/projected/7634bb29-8fe4-4ece-83c5-dd406bdb626e-kube-api-access-zw28n\") pod \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\" (UID: \"7634bb29-8fe4-4ece-83c5-dd406bdb626e\") "
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.208786 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-utilities" (OuterVolumeSpecName: "utilities") pod "7634bb29-8fe4-4ece-83c5-dd406bdb626e" (UID: "7634bb29-8fe4-4ece-83c5-dd406bdb626e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.220919 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7634bb29-8fe4-4ece-83c5-dd406bdb626e-kube-api-access-zw28n" (OuterVolumeSpecName: "kube-api-access-zw28n") pod "7634bb29-8fe4-4ece-83c5-dd406bdb626e" (UID: "7634bb29-8fe4-4ece-83c5-dd406bdb626e"). InnerVolumeSpecName "kube-api-access-zw28n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.309266 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw28n\" (UniqueName: \"kubernetes.io/projected/7634bb29-8fe4-4ece-83c5-dd406bdb626e-kube-api-access-zw28n\") on node \"crc\" DevicePath \"\""
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.309310 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.520192 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7634bb29-8fe4-4ece-83c5-dd406bdb626e" (UID: "7634bb29-8fe4-4ece-83c5-dd406bdb626e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.613372 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7634bb29-8fe4-4ece-83c5-dd406bdb626e-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.677413 4816 generic.go:334] "Generic (PLEG): container finished" podID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerID="795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1" exitCode=0
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.677506 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgwnr" event={"ID":"7634bb29-8fe4-4ece-83c5-dd406bdb626e","Type":"ContainerDied","Data":"795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1"}
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.677542 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fgwnr"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.677607 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgwnr" event={"ID":"7634bb29-8fe4-4ece-83c5-dd406bdb626e","Type":"ContainerDied","Data":"08c73fdc033523773fc2a9e332da663de6d70fa366541197de4e38df04108af5"}
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.677635 4816 scope.go:117] "RemoveContainer" containerID="795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.703130 4816 scope.go:117] "RemoveContainer" containerID="0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.723233 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fgwnr"]
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.730562 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fgwnr"]
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.740136 4816 scope.go:117] "RemoveContainer" containerID="814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.756471 4816 scope.go:117] "RemoveContainer" containerID="795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1"
Feb 16 14:12:02 crc kubenswrapper[4816]: E0216 14:12:02.757419 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1\": container with ID starting with 795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1 not found: ID does not exist" containerID="795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.757507 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1"} err="failed to get container status \"795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1\": rpc error: code = NotFound desc = could not find container \"795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1\": container with ID starting with 795918e8ab773334bd1c0332bccfa66f814195c084fbde720f0387947716a0f1 not found: ID does not exist"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.757546 4816 scope.go:117] "RemoveContainer" containerID="0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750"
Feb 16 14:12:02 crc kubenswrapper[4816]: E0216 14:12:02.758099 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750\": container with ID starting with 0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750 not found: ID does not exist" containerID="0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.758147 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750"} err="failed to get container status \"0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750\": rpc error: code = NotFound desc = could not find container \"0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750\": container with ID starting with 0ab417ddf9bb43cb8a1262a02d4ebe16cd12fb9ff93b8485016b376b5b781750 not found: ID does not exist"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.758182 4816 scope.go:117] "RemoveContainer" containerID="814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb"
Feb 16 14:12:02 crc kubenswrapper[4816]: E0216 14:12:02.758842 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb\": container with ID starting with 814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb not found: ID does not exist" containerID="814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb"
Feb 16 14:12:02 crc kubenswrapper[4816]: I0216 14:12:02.758878 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb"} err="failed to get container status \"814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb\": rpc error: code = NotFound desc = could not find container \"814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb\": container with ID starting with 814223253ea7b47503e5e01fa9bd2effe2e9896184d4ecf7900131a02f5d3deb not found: ID does not exist"
Feb 16 14:12:03 crc kubenswrapper[4816]: I0216 14:12:03.407327 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" path="/var/lib/kubelet/pods/7634bb29-8fe4-4ece-83c5-dd406bdb626e/volumes"
Feb 16 14:12:10 crc kubenswrapper[4816]: I0216 14:12:10.399102 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:12:10 crc kubenswrapper[4816]: E0216 14:12:10.400070 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:12:22 crc kubenswrapper[4816]: I0216 14:12:22.399078 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:12:22 crc kubenswrapper[4816]: E0216 14:12:22.401348 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:12:33 crc kubenswrapper[4816]: I0216 14:12:33.399031 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:12:33 crc kubenswrapper[4816]: E0216 14:12:33.400142 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:12:45 crc kubenswrapper[4816]: I0216 14:12:45.399357 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4"
Feb 16 14:12:46 crc kubenswrapper[4816]: I0216 14:12:46.007269 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"caed64ee3964a4421120d568fb03c81370a76bf657975e0b221514bdb9b6cf0f"}
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.825478 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v28l6"]
Feb 16 14:14:24 crc kubenswrapper[4816]: E0216 14:14:24.827569 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="registry-server"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.827745 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="registry-server"
Feb 16 14:14:24 crc kubenswrapper[4816]: E0216 14:14:24.827860 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="extract-content"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.827943 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="extract-content"
Feb 16 14:14:24 crc kubenswrapper[4816]: E0216 14:14:24.828033 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="extract-utilities"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.828112 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="extract-utilities"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.828373 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7634bb29-8fe4-4ece-83c5-dd406bdb626e" containerName="registry-server"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.830284 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.847125 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v28l6"]
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.896604 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vgzz\" (UniqueName: \"kubernetes.io/projected/a16b916f-e670-489d-b47a-37fa4e7752a4-kube-api-access-7vgzz\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.896940 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-catalog-content\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.897099 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-utilities\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.997720 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vgzz\" (UniqueName: \"kubernetes.io/projected/a16b916f-e670-489d-b47a-37fa4e7752a4-kube-api-access-7vgzz\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.997811 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-catalog-content\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.997873 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-utilities\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.998542 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-utilities\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:24 crc kubenswrapper[4816]: I0216 14:14:24.998630 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-catalog-content\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:25 crc kubenswrapper[4816]: I0216 14:14:25.016844 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vgzz\" (UniqueName: \"kubernetes.io/projected/a16b916f-e670-489d-b47a-37fa4e7752a4-kube-api-access-7vgzz\") pod \"certified-operators-v28l6\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:25 crc kubenswrapper[4816]: I0216 14:14:25.170559 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:25 crc kubenswrapper[4816]: I0216 14:14:25.640209 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v28l6"]
Feb 16 14:14:25 crc kubenswrapper[4816]: I0216 14:14:25.893740 4816 generic.go:334] "Generic (PLEG): container finished" podID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerID="4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e" exitCode=0
Feb 16 14:14:25 crc kubenswrapper[4816]: I0216 14:14:25.893813 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v28l6" event={"ID":"a16b916f-e670-489d-b47a-37fa4e7752a4","Type":"ContainerDied","Data":"4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e"}
Feb 16 14:14:25 crc kubenswrapper[4816]: I0216 14:14:25.894055 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v28l6" event={"ID":"a16b916f-e670-489d-b47a-37fa4e7752a4","Type":"ContainerStarted","Data":"afcc1e60e1fa85bad3b8da52bde04e1fd71eb6ebff6a048fef7ae107707eb7bd"}
Feb 16 14:14:27 crc kubenswrapper[4816]: I0216 14:14:27.916091 4816 generic.go:334] "Generic (PLEG): container finished" podID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerID="fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa" exitCode=0
Feb 16 14:14:27 crc kubenswrapper[4816]: I0216 14:14:27.916145 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v28l6" event={"ID":"a16b916f-e670-489d-b47a-37fa4e7752a4","Type":"ContainerDied","Data":"fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa"}
Feb 16 14:14:28 crc kubenswrapper[4816]: I0216 14:14:28.924624 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v28l6" event={"ID":"a16b916f-e670-489d-b47a-37fa4e7752a4","Type":"ContainerStarted","Data":"89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51"}
Feb 16 14:14:28 crc kubenswrapper[4816]: I0216 14:14:28.941107 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v28l6" podStartSLOduration=2.539486856 podStartE2EDuration="4.941088852s" podCreationTimestamp="2026-02-16 14:14:24 +0000 UTC" firstStartedPulling="2026-02-16 14:14:25.895646346 +0000 UTC m=+4265.222360084" lastFinishedPulling="2026-02-16 14:14:28.297248332 +0000 UTC m=+4267.623962080" observedRunningTime="2026-02-16 14:14:28.939778957 +0000 UTC m=+4268.266492695" watchObservedRunningTime="2026-02-16 14:14:28.941088852 +0000 UTC m=+4268.267802580"
Feb 16 14:14:35 crc kubenswrapper[4816]: I0216 14:14:35.171375 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:35 crc kubenswrapper[4816]: I0216 14:14:35.171962 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v28l6"
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v28l6" Feb 16 14:14:36 crc kubenswrapper[4816]: I0216 14:14:36.052481 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v28l6" Feb 16 14:14:36 crc kubenswrapper[4816]: I0216 14:14:36.098945 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v28l6"] Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.009349 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v28l6" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="registry-server" containerID="cri-o://89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51" gracePeriod=2 Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.373559 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v28l6" Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.397636 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-utilities\") pod \"a16b916f-e670-489d-b47a-37fa4e7752a4\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.397757 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-catalog-content\") pod \"a16b916f-e670-489d-b47a-37fa4e7752a4\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.397880 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vgzz\" (UniqueName: \"kubernetes.io/projected/a16b916f-e670-489d-b47a-37fa4e7752a4-kube-api-access-7vgzz\") pod \"a16b916f-e670-489d-b47a-37fa4e7752a4\" (UID: \"a16b916f-e670-489d-b47a-37fa4e7752a4\") " Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.403848 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-utilities" (OuterVolumeSpecName: "utilities") pod "a16b916f-e670-489d-b47a-37fa4e7752a4" (UID: "a16b916f-e670-489d-b47a-37fa4e7752a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.425052 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a16b916f-e670-489d-b47a-37fa4e7752a4-kube-api-access-7vgzz" (OuterVolumeSpecName: "kube-api-access-7vgzz") pod "a16b916f-e670-489d-b47a-37fa4e7752a4" (UID: "a16b916f-e670-489d-b47a-37fa4e7752a4"). InnerVolumeSpecName "kube-api-access-7vgzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.479738 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a16b916f-e670-489d-b47a-37fa4e7752a4" (UID: "a16b916f-e670-489d-b47a-37fa4e7752a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.499384 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.499412 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a16b916f-e670-489d-b47a-37fa4e7752a4-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:14:38 crc kubenswrapper[4816]: I0216 14:14:38.499424 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vgzz\" (UniqueName: \"kubernetes.io/projected/a16b916f-e670-489d-b47a-37fa4e7752a4-kube-api-access-7vgzz\") on node \"crc\" DevicePath \"\"" Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.018114 4816 generic.go:334] "Generic (PLEG): container finished" podID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerID="89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51" exitCode=0 Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.018156 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v28l6" event={"ID":"a16b916f-e670-489d-b47a-37fa4e7752a4","Type":"ContainerDied","Data":"89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51"} Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.018209 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v28l6" event={"ID":"a16b916f-e670-489d-b47a-37fa4e7752a4","Type":"ContainerDied","Data":"afcc1e60e1fa85bad3b8da52bde04e1fd71eb6ebff6a048fef7ae107707eb7bd"} Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.018217 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.018217 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v28l6"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.018226 4816 scope.go:117] "RemoveContainer" containerID="89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.039959 4816 scope.go:117] "RemoveContainer" containerID="fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.066690 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v28l6"]
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.074995 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v28l6"]
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.085589 4816 scope.go:117] "RemoveContainer" containerID="4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.104327 4816 scope.go:117] "RemoveContainer" containerID="89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51"
Feb 16 14:14:39 crc kubenswrapper[4816]: E0216 14:14:39.104713 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51\": container with ID starting with 89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51 not found: ID does not exist" containerID="89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.104811 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51"} err="failed to get container status \"89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51\": rpc error: code = NotFound desc = could not find container \"89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51\": container with ID starting with 89134551b8052430df2b94d69640e24ab3269c54eb387a73c12238ae3cc1fd51 not found: ID does not exist"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.104893 4816 scope.go:117] "RemoveContainer" containerID="fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa"
Feb 16 14:14:39 crc kubenswrapper[4816]: E0216 14:14:39.105143 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa\": container with ID starting with fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa not found: ID does not exist" containerID="fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.105218 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa"} err="failed to get container status \"fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa\": rpc error: code = NotFound desc = could not find container \"fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa\": container with ID starting with fb37e33d33f92a219961544c2f87e3a7aa0120af231f97e1df4ce0c58c03beaa not found: ID does not exist"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.105286 4816 scope.go:117] "RemoveContainer" containerID="4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e"
Feb 16 14:14:39 crc kubenswrapper[4816]: E0216 14:14:39.105527 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e\": container with ID starting with 4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e not found: ID does not exist" containerID="4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.105606 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e"} err="failed to get container status \"4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e\": rpc error: code = NotFound desc = could not find container \"4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e\": container with ID starting with 4cd800c11da8be0c0745d04991da754c1d7f106ae14005ff40b9309943e99c9e not found: ID does not exist"
Feb 16 14:14:39 crc kubenswrapper[4816]: I0216 14:14:39.408206 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" path="/var/lib/kubelet/pods/a16b916f-e670-489d-b47a-37fa4e7752a4/volumes"
Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.198955 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st"]
Feb 16 14:15:00 crc kubenswrapper[4816]: E0216 14:15:00.200136 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="registry-server"
Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.200149 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="registry-server"
Feb 16 14:15:00 crc kubenswrapper[4816]: E0216 14:15:00.200161 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="extract-content"
Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.200167 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="extract-content"
Feb 16 14:15:00 crc kubenswrapper[4816]: E0216 14:15:00.200185 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="extract-utilities"
Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.200191 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="extract-utilities"
Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.200314 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a16b916f-e670-489d-b47a-37fa4e7752a4" containerName="registry-server"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.202384 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.204313 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.209368 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st"] Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.297540 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f65357b-a782-4d03-bb11-e7fba09ca5f8-secret-volume\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.297710 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j7qg\" (UniqueName: \"kubernetes.io/projected/2f65357b-a782-4d03-bb11-e7fba09ca5f8-kube-api-access-7j7qg\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.297752 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f65357b-a782-4d03-bb11-e7fba09ca5f8-config-volume\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.399334 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j7qg\" (UniqueName: \"kubernetes.io/projected/2f65357b-a782-4d03-bb11-e7fba09ca5f8-kube-api-access-7j7qg\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.399825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f65357b-a782-4d03-bb11-e7fba09ca5f8-config-volume\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.400079 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f65357b-a782-4d03-bb11-e7fba09ca5f8-secret-volume\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.400895 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f65357b-a782-4d03-bb11-e7fba09ca5f8-config-volume\") pod 
\"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.408427 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f65357b-a782-4d03-bb11-e7fba09ca5f8-secret-volume\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.419785 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j7qg\" (UniqueName: \"kubernetes.io/projected/2f65357b-a782-4d03-bb11-e7fba09ca5f8-kube-api-access-7j7qg\") pod \"collect-profiles-29520855-d64st\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.527770 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:00 crc kubenswrapper[4816]: I0216 14:15:00.935043 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st"] Feb 16 14:15:00 crc kubenswrapper[4816]: W0216 14:15:00.936068 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f65357b_a782_4d03_bb11_e7fba09ca5f8.slice/crio-84ad5bdf6a69116840a3b3dd76f82ecea13c49909f022d34615c5ca695b7eddb WatchSource:0}: Error finding container 84ad5bdf6a69116840a3b3dd76f82ecea13c49909f022d34615c5ca695b7eddb: Status 404 returned error can't find the container with id 84ad5bdf6a69116840a3b3dd76f82ecea13c49909f022d34615c5ca695b7eddb Feb 16 14:15:01 crc kubenswrapper[4816]: I0216 14:15:01.184534 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" event={"ID":"2f65357b-a782-4d03-bb11-e7fba09ca5f8","Type":"ContainerStarted","Data":"c7aaebcda910099191102012e7f4056f1f678285653262e0b510637a4ce24e86"} Feb 16 14:15:01 crc kubenswrapper[4816]: I0216 14:15:01.184573 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" event={"ID":"2f65357b-a782-4d03-bb11-e7fba09ca5f8","Type":"ContainerStarted","Data":"84ad5bdf6a69116840a3b3dd76f82ecea13c49909f022d34615c5ca695b7eddb"} Feb 16 14:15:01 crc kubenswrapper[4816]: I0216 14:15:01.210765 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" podStartSLOduration=1.210734952 podStartE2EDuration="1.210734952s" podCreationTimestamp="2026-02-16 14:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:15:01.20151875 +0000 UTC m=+4300.528232478" watchObservedRunningTime="2026-02-16 14:15:01.210734952 +0000 UTC m=+4300.537448710" Feb 16 14:15:02 crc kubenswrapper[4816]: I0216 14:15:02.192291 4816 generic.go:334] "Generic (PLEG): container finished" podID="2f65357b-a782-4d03-bb11-e7fba09ca5f8" containerID="c7aaebcda910099191102012e7f4056f1f678285653262e0b510637a4ce24e86" exitCode=0 Feb 16 14:15:02 crc kubenswrapper[4816]: I0216 14:15:02.192331 
4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" event={"ID":"2f65357b-a782-4d03-bb11-e7fba09ca5f8","Type":"ContainerDied","Data":"c7aaebcda910099191102012e7f4056f1f678285653262e0b510637a4ce24e86"} Feb 16 14:15:03 crc kubenswrapper[4816]: I0216 14:15:03.860016 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:03 crc kubenswrapper[4816]: I0216 14:15:03.961334 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j7qg\" (UniqueName: \"kubernetes.io/projected/2f65357b-a782-4d03-bb11-e7fba09ca5f8-kube-api-access-7j7qg\") pod \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " Feb 16 14:15:03 crc kubenswrapper[4816]: I0216 14:15:03.961387 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f65357b-a782-4d03-bb11-e7fba09ca5f8-secret-volume\") pod \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " Feb 16 14:15:03 crc kubenswrapper[4816]: I0216 14:15:03.961418 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f65357b-a782-4d03-bb11-e7fba09ca5f8-config-volume\") pod \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\" (UID: \"2f65357b-a782-4d03-bb11-e7fba09ca5f8\") " Feb 16 14:15:03 crc kubenswrapper[4816]: I0216 14:15:03.962077 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f65357b-a782-4d03-bb11-e7fba09ca5f8-config-volume" (OuterVolumeSpecName: "config-volume") pod "2f65357b-a782-4d03-bb11-e7fba09ca5f8" (UID: "2f65357b-a782-4d03-bb11-e7fba09ca5f8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:15:03 crc kubenswrapper[4816]: I0216 14:15:03.973223 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f65357b-a782-4d03-bb11-e7fba09ca5f8-kube-api-access-7j7qg" (OuterVolumeSpecName: "kube-api-access-7j7qg") pod "2f65357b-a782-4d03-bb11-e7fba09ca5f8" (UID: "2f65357b-a782-4d03-bb11-e7fba09ca5f8"). InnerVolumeSpecName "kube-api-access-7j7qg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:15:03 crc kubenswrapper[4816]: I0216 14:15:03.973309 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f65357b-a782-4d03-bb11-e7fba09ca5f8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2f65357b-a782-4d03-bb11-e7fba09ca5f8" (UID: "2f65357b-a782-4d03-bb11-e7fba09ca5f8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.062328 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j7qg\" (UniqueName: \"kubernetes.io/projected/2f65357b-a782-4d03-bb11-e7fba09ca5f8-kube-api-access-7j7qg\") on node \"crc\" DevicePath \"\"" Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.062359 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f65357b-a782-4d03-bb11-e7fba09ca5f8-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.062372 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f65357b-a782-4d03-bb11-e7fba09ca5f8-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.206006 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" event={"ID":"2f65357b-a782-4d03-bb11-e7fba09ca5f8","Type":"ContainerDied","Data":"84ad5bdf6a69116840a3b3dd76f82ecea13c49909f022d34615c5ca695b7eddb"} Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.206057 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st" Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.206067 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84ad5bdf6a69116840a3b3dd76f82ecea13c49909f022d34615c5ca695b7eddb" Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.280934 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55"] Feb 16 14:15:04 crc kubenswrapper[4816]: I0216 14:15:04.286291 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520810-xhd55"] Feb 16 14:15:05 crc kubenswrapper[4816]: I0216 14:15:05.409859 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="825e57ca-7b7b-4a34-8211-0ea0e222f836" path="/var/lib/kubelet/pods/825e57ca-7b7b-4a34-8211-0ea0e222f836/volumes" Feb 16 14:15:06 crc kubenswrapper[4816]: I0216 14:15:06.941334 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:15:06 crc kubenswrapper[4816]: I0216 14:15:06.941404 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:15:26 crc kubenswrapper[4816]: I0216 14:15:26.802121 4816 scope.go:117] "RemoveContainer" containerID="7051296f9781ee33810fdf36264bc8ace4ae05d908028c2e3f50e035cf453c06" Feb 16 14:15:36 crc kubenswrapper[4816]: I0216 14:15:36.941099 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Feb 16 14:15:36 crc kubenswrapper[4816]: I0216 14:15:36.941676 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:16:06 crc kubenswrapper[4816]: I0216 14:16:06.941187 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:16:06 crc kubenswrapper[4816]: I0216 14:16:06.941746 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:16:06 crc kubenswrapper[4816]: I0216 14:16:06.941794 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:16:06 crc kubenswrapper[4816]: I0216 14:16:06.942434 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"caed64ee3964a4421120d568fb03c81370a76bf657975e0b221514bdb9b6cf0f"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:16:06 crc kubenswrapper[4816]: I0216 14:16:06.942479 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://caed64ee3964a4421120d568fb03c81370a76bf657975e0b221514bdb9b6cf0f" gracePeriod=600 Feb 16 14:16:07 crc kubenswrapper[4816]: I0216 14:16:07.667070 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="caed64ee3964a4421120d568fb03c81370a76bf657975e0b221514bdb9b6cf0f" exitCode=0 Feb 16 14:16:07 crc kubenswrapper[4816]: I0216 14:16:07.667142 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"caed64ee3964a4421120d568fb03c81370a76bf657975e0b221514bdb9b6cf0f"} Feb 16 14:16:07 crc kubenswrapper[4816]: I0216 14:16:07.667528 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690"} Feb 16 14:16:07 crc kubenswrapper[4816]: I0216 14:16:07.667550 4816 scope.go:117] "RemoveContainer" containerID="ad96ca3a27191529c57b297c4fa78ee9a9569c4a4135a2b1373399ad320e1aa4" Feb 16 14:16:53 crc kubenswrapper[4816]: I0216 14:16:53.926035 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-8jlcx"] Feb 16 14:16:53 crc kubenswrapper[4816]: I0216 14:16:53.933472 4816 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["crc-storage/crc-storage-crc-8jlcx"] Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.036736 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-98z2r"] Feb 16 14:16:54 crc kubenswrapper[4816]: E0216 14:16:54.037420 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f65357b-a782-4d03-bb11-e7fba09ca5f8" containerName="collect-profiles" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.037443 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f65357b-a782-4d03-bb11-e7fba09ca5f8" containerName="collect-profiles" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.037905 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f65357b-a782-4d03-bb11-e7fba09ca5f8" containerName="collect-profiles" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.039096 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.045427 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.045448 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.045914 4816 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-6zflt" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.046033 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.060466 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-98z2r"] Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.139359 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgwvp\" (UniqueName: \"kubernetes.io/projected/3e18f57d-16fb-44e1-9221-6915d6bfff0f-kube-api-access-lgwvp\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.139449 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/3e18f57d-16fb-44e1-9221-6915d6bfff0f-crc-storage\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.139647 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/3e18f57d-16fb-44e1-9221-6915d6bfff0f-node-mnt\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.240820 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgwvp\" (UniqueName: \"kubernetes.io/projected/3e18f57d-16fb-44e1-9221-6915d6bfff0f-kube-api-access-lgwvp\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.240876 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/3e18f57d-16fb-44e1-9221-6915d6bfff0f-crc-storage\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.240900 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/3e18f57d-16fb-44e1-9221-6915d6bfff0f-node-mnt\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.241218 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/3e18f57d-16fb-44e1-9221-6915d6bfff0f-node-mnt\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.241722 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/3e18f57d-16fb-44e1-9221-6915d6bfff0f-crc-storage\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.272299 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgwvp\" (UniqueName: \"kubernetes.io/projected/3e18f57d-16fb-44e1-9221-6915d6bfff0f-kube-api-access-lgwvp\") pod \"crc-storage-crc-98z2r\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.375262 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.832019 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-98z2r"] Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.839943 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.993787 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cctdj"] Feb 16 14:16:54 crc kubenswrapper[4816]: I0216 14:16:54.995581 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.009121 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cctdj"] Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.028582 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-98z2r" event={"ID":"3e18f57d-16fb-44e1-9221-6915d6bfff0f","Type":"ContainerStarted","Data":"e0e01ed0910f98ea477080a34f5af93f78f942ccecc0a3aaf6c9e9deb5e1a558"} Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.152479 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-catalog-content\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.152550 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-utilities\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.152578 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txgtz\" (UniqueName: \"kubernetes.io/projected/d51cc27b-afa3-40bd-a8d0-832145cc65d8-kube-api-access-txgtz\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.192246 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pv24l"] Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.193611 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.204267 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pv24l"] Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.254462 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-catalog-content\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.254541 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-utilities\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.254569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txgtz\" (UniqueName: \"kubernetes.io/projected/d51cc27b-afa3-40bd-a8d0-832145cc65d8-kube-api-access-txgtz\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.255180 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-utilities\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.255306 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-catalog-content\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.272030 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txgtz\" (UniqueName: \"kubernetes.io/projected/d51cc27b-afa3-40bd-a8d0-832145cc65d8-kube-api-access-txgtz\") pod \"redhat-marketplace-cctdj\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.330847 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.356602 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d5sk\" (UniqueName: \"kubernetes.io/projected/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-kube-api-access-8d5sk\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.356724 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-catalog-content\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.356773 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-utilities\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.409214 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06079d91-cbb5-4049-9ec4-9a78778c3846" path="/var/lib/kubelet/pods/06079d91-cbb5-4049-9ec4-9a78778c3846/volumes" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.477885 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d5sk\" (UniqueName: \"kubernetes.io/projected/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-kube-api-access-8d5sk\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.478195 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-catalog-content\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.478235 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-utilities\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.482686 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-utilities\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.482716 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-catalog-content\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.495576 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d5sk\" (UniqueName: \"kubernetes.io/projected/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-kube-api-access-8d5sk\") pod \"redhat-operators-pv24l\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.512285 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.836283 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cctdj"] Feb 16 14:16:55 crc kubenswrapper[4816]: W0216 14:16:55.868494 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd51cc27b_afa3_40bd_a8d0_832145cc65d8.slice/crio-5bcfb9e1d299672ed1f6796b45e06f329d93f16c57eb5bb946fd02c73e045645 WatchSource:0}: Error finding container 5bcfb9e1d299672ed1f6796b45e06f329d93f16c57eb5bb946fd02c73e045645: Status 404 returned error can't find the container with id 5bcfb9e1d299672ed1f6796b45e06f329d93f16c57eb5bb946fd02c73e045645 Feb 16 14:16:55 crc kubenswrapper[4816]: I0216 14:16:55.969433 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pv24l"] Feb 16 14:16:55 crc kubenswrapper[4816]: W0216 14:16:55.971771 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf000f02e_2505_4b27_b0ed_6d5fb679b7d1.slice/crio-00169a93241adc2232eb71f56682e454d8dd3fe09b3fb06f7d88728b55357009 WatchSource:0}: Error finding container 00169a93241adc2232eb71f56682e454d8dd3fe09b3fb06f7d88728b55357009: Status 404 returned error can't find the container with id 00169a93241adc2232eb71f56682e454d8dd3fe09b3fb06f7d88728b55357009 Feb 16 14:16:56 crc kubenswrapper[4816]: I0216 14:16:56.036476 4816 generic.go:334] "Generic (PLEG): container finished" podID="3e18f57d-16fb-44e1-9221-6915d6bfff0f" containerID="a7130ad6ee21cc3b21acc4654aa605582152cfb590c2ecb9b18e8a398fcacef3" exitCode=0 Feb 16 14:16:56 crc kubenswrapper[4816]: I0216 14:16:56.036559 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-98z2r" event={"ID":"3e18f57d-16fb-44e1-9221-6915d6bfff0f","Type":"ContainerDied","Data":"a7130ad6ee21cc3b21acc4654aa605582152cfb590c2ecb9b18e8a398fcacef3"} Feb 16 14:16:56 crc kubenswrapper[4816]: I0216 14:16:56.041046 4816 generic.go:334] "Generic (PLEG): container finished" podID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerID="0a0250651657986b481e5e58f81ea6b9d304ab90f6755fb15470001528d26794" exitCode=0 Feb 16 14:16:56 crc kubenswrapper[4816]: I0216 14:16:56.041105 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cctdj" event={"ID":"d51cc27b-afa3-40bd-a8d0-832145cc65d8","Type":"ContainerDied","Data":"0a0250651657986b481e5e58f81ea6b9d304ab90f6755fb15470001528d26794"} Feb 16 14:16:56 crc kubenswrapper[4816]: I0216 14:16:56.041135 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cctdj" event={"ID":"d51cc27b-afa3-40bd-a8d0-832145cc65d8","Type":"ContainerStarted","Data":"5bcfb9e1d299672ed1f6796b45e06f329d93f16c57eb5bb946fd02c73e045645"} Feb 16 14:16:56 crc kubenswrapper[4816]: I0216 14:16:56.043373 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-pv24l" event={"ID":"f000f02e-2505-4b27-b0ed-6d5fb679b7d1","Type":"ContainerStarted","Data":"00169a93241adc2232eb71f56682e454d8dd3fe09b3fb06f7d88728b55357009"} Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.057068 4816 generic.go:334] "Generic (PLEG): container finished" podID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerID="7b0fc5b504389c39030c1dcf8544b1558bf22ff7e756d224baa5b9a4df7aa336" exitCode=0 Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.057296 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cctdj" event={"ID":"d51cc27b-afa3-40bd-a8d0-832145cc65d8","Type":"ContainerDied","Data":"7b0fc5b504389c39030c1dcf8544b1558bf22ff7e756d224baa5b9a4df7aa336"} Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.059514 4816 generic.go:334] "Generic (PLEG): container finished" podID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerID="04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce" exitCode=0 Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.059582 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pv24l" event={"ID":"f000f02e-2505-4b27-b0ed-6d5fb679b7d1","Type":"ContainerDied","Data":"04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce"} Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.351347 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.512129 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/3e18f57d-16fb-44e1-9221-6915d6bfff0f-node-mnt\") pod \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.512242 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgwvp\" (UniqueName: \"kubernetes.io/projected/3e18f57d-16fb-44e1-9221-6915d6bfff0f-kube-api-access-lgwvp\") pod \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.512252 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e18f57d-16fb-44e1-9221-6915d6bfff0f-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "3e18f57d-16fb-44e1-9221-6915d6bfff0f" (UID: "3e18f57d-16fb-44e1-9221-6915d6bfff0f"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.512279 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/3e18f57d-16fb-44e1-9221-6915d6bfff0f-crc-storage\") pod \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\" (UID: \"3e18f57d-16fb-44e1-9221-6915d6bfff0f\") " Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.512632 4816 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/3e18f57d-16fb-44e1-9221-6915d6bfff0f-node-mnt\") on node \"crc\" DevicePath \"\"" Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.519043 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e18f57d-16fb-44e1-9221-6915d6bfff0f-kube-api-access-lgwvp" (OuterVolumeSpecName: "kube-api-access-lgwvp") pod "3e18f57d-16fb-44e1-9221-6915d6bfff0f" (UID: "3e18f57d-16fb-44e1-9221-6915d6bfff0f"). InnerVolumeSpecName "kube-api-access-lgwvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.536197 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e18f57d-16fb-44e1-9221-6915d6bfff0f-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "3e18f57d-16fb-44e1-9221-6915d6bfff0f" (UID: "3e18f57d-16fb-44e1-9221-6915d6bfff0f"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.613830 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgwvp\" (UniqueName: \"kubernetes.io/projected/3e18f57d-16fb-44e1-9221-6915d6bfff0f-kube-api-access-lgwvp\") on node \"crc\" DevicePath \"\"" Feb 16 14:16:57 crc kubenswrapper[4816]: I0216 14:16:57.613868 4816 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/3e18f57d-16fb-44e1-9221-6915d6bfff0f-crc-storage\") on node \"crc\" DevicePath \"\"" Feb 16 14:16:58 crc kubenswrapper[4816]: I0216 14:16:58.066403 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-98z2r" event={"ID":"3e18f57d-16fb-44e1-9221-6915d6bfff0f","Type":"ContainerDied","Data":"e0e01ed0910f98ea477080a34f5af93f78f942ccecc0a3aaf6c9e9deb5e1a558"} Feb 16 14:16:58 crc kubenswrapper[4816]: I0216 14:16:58.066445 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0e01ed0910f98ea477080a34f5af93f78f942ccecc0a3aaf6c9e9deb5e1a558" Feb 16 14:16:58 crc kubenswrapper[4816]: I0216 14:16:58.066494 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-98z2r" Feb 16 14:16:58 crc kubenswrapper[4816]: I0216 14:16:58.069404 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cctdj" event={"ID":"d51cc27b-afa3-40bd-a8d0-832145cc65d8","Type":"ContainerStarted","Data":"f64ee244b4478d1bd88a09821ac9e794670c626b63124f91a76a5d79307c3154"} Feb 16 14:16:58 crc kubenswrapper[4816]: I0216 14:16:58.090810 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cctdj" podStartSLOduration=2.439377943 podStartE2EDuration="4.090794566s" podCreationTimestamp="2026-02-16 14:16:54 +0000 UTC" firstStartedPulling="2026-02-16 14:16:56.042476917 +0000 UTC m=+4415.369190655" lastFinishedPulling="2026-02-16 14:16:57.69389355 +0000 UTC m=+4417.020607278" observedRunningTime="2026-02-16 14:16:58.08837345 +0000 UTC m=+4417.415087208" watchObservedRunningTime="2026-02-16 14:16:58.090794566 +0000 UTC m=+4417.417508294" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.078410 4816 generic.go:334] "Generic (PLEG): container finished" podID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerID="46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86" exitCode=0 Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.078569 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pv24l" event={"ID":"f000f02e-2505-4b27-b0ed-6d5fb679b7d1","Type":"ContainerDied","Data":"46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86"} Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.410165 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-98z2r"] Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.415510 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-98z2r"] Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.530625 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-2lsns"] Feb 16 14:16:59 crc kubenswrapper[4816]: E0216 14:16:59.530926 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e18f57d-16fb-44e1-9221-6915d6bfff0f" containerName="storage" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.530942 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e18f57d-16fb-44e1-9221-6915d6bfff0f" containerName="storage" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.531079 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e18f57d-16fb-44e1-9221-6915d6bfff0f" containerName="storage" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.531517 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.533989 4816 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-6zflt" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.534227 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.534365 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.534562 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.549936 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2lsns"] Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.639903 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-crc-storage\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.640417 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58lff\" (UniqueName: \"kubernetes.io/projected/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-kube-api-access-58lff\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.640514 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-node-mnt\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.744441 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-crc-storage\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.744517 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58lff\" (UniqueName: \"kubernetes.io/projected/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-kube-api-access-58lff\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.744546 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-node-mnt\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.744961 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-node-mnt\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " 
pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.746178 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-crc-storage\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.765723 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58lff\" (UniqueName: \"kubernetes.io/projected/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-kube-api-access-58lff\") pod \"crc-storage-crc-2lsns\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:16:59 crc kubenswrapper[4816]: I0216 14:16:59.847835 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:17:00 crc kubenswrapper[4816]: I0216 14:17:00.088704 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pv24l" event={"ID":"f000f02e-2505-4b27-b0ed-6d5fb679b7d1","Type":"ContainerStarted","Data":"177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed"} Feb 16 14:17:00 crc kubenswrapper[4816]: I0216 14:17:00.106975 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pv24l" podStartSLOduration=2.475793522 podStartE2EDuration="5.106952919s" podCreationTimestamp="2026-02-16 14:16:55 +0000 UTC" firstStartedPulling="2026-02-16 14:16:57.060991207 +0000 UTC m=+4416.387704945" lastFinishedPulling="2026-02-16 14:16:59.692150614 +0000 UTC m=+4419.018864342" observedRunningTime="2026-02-16 14:17:00.105399226 +0000 UTC m=+4419.432112974" watchObservedRunningTime="2026-02-16 14:17:00.106952919 +0000 UTC m=+4419.433666647" Feb 16 14:17:00 crc kubenswrapper[4816]: I0216 14:17:00.269287 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2lsns"] Feb 16 14:17:00 crc kubenswrapper[4816]: W0216 14:17:00.275301 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c8e2e91_3d6a_45ed_9112_bf2ae61164bf.slice/crio-ae1863f6ad1f58dea35a0261258a28e1a972d9434634e93f91f33ce4801ff44f WatchSource:0}: Error finding container ae1863f6ad1f58dea35a0261258a28e1a972d9434634e93f91f33ce4801ff44f: Status 404 returned error can't find the container with id ae1863f6ad1f58dea35a0261258a28e1a972d9434634e93f91f33ce4801ff44f Feb 16 14:17:01 crc kubenswrapper[4816]: I0216 14:17:01.098025 4816 generic.go:334] "Generic (PLEG): container finished" podID="5c8e2e91-3d6a-45ed-9112-bf2ae61164bf" containerID="39f991f7edf525874583568c8a0d5221e76445bf690678def29b585a7949a1f4" exitCode=0 Feb 16 14:17:01 crc kubenswrapper[4816]: I0216 14:17:01.098079 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2lsns" event={"ID":"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf","Type":"ContainerDied","Data":"39f991f7edf525874583568c8a0d5221e76445bf690678def29b585a7949a1f4"} Feb 16 14:17:01 crc kubenswrapper[4816]: I0216 14:17:01.098412 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2lsns" event={"ID":"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf","Type":"ContainerStarted","Data":"ae1863f6ad1f58dea35a0261258a28e1a972d9434634e93f91f33ce4801ff44f"} Feb 16 14:17:01 crc kubenswrapper[4816]: I0216 14:17:01.410736 
4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e18f57d-16fb-44e1-9221-6915d6bfff0f" path="/var/lib/kubelet/pods/3e18f57d-16fb-44e1-9221-6915d6bfff0f/volumes" Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.364256 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.482044 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-crc-storage\") pod \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.482197 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58lff\" (UniqueName: \"kubernetes.io/projected/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-kube-api-access-58lff\") pod \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.482294 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-node-mnt\") pod \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\" (UID: \"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf\") " Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.482395 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "5c8e2e91-3d6a-45ed-9112-bf2ae61164bf" (UID: "5c8e2e91-3d6a-45ed-9112-bf2ae61164bf"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.482708 4816 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-node-mnt\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.488748 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-kube-api-access-58lff" (OuterVolumeSpecName: "kube-api-access-58lff") pod "5c8e2e91-3d6a-45ed-9112-bf2ae61164bf" (UID: "5c8e2e91-3d6a-45ed-9112-bf2ae61164bf"). InnerVolumeSpecName "kube-api-access-58lff". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.499208 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "5c8e2e91-3d6a-45ed-9112-bf2ae61164bf" (UID: "5c8e2e91-3d6a-45ed-9112-bf2ae61164bf"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.584218 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58lff\" (UniqueName: \"kubernetes.io/projected/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-kube-api-access-58lff\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:02 crc kubenswrapper[4816]: I0216 14:17:02.584249 4816 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5c8e2e91-3d6a-45ed-9112-bf2ae61164bf-crc-storage\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:03 crc kubenswrapper[4816]: I0216 14:17:03.115383 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2lsns" event={"ID":"5c8e2e91-3d6a-45ed-9112-bf2ae61164bf","Type":"ContainerDied","Data":"ae1863f6ad1f58dea35a0261258a28e1a972d9434634e93f91f33ce4801ff44f"} Feb 16 14:17:03 crc kubenswrapper[4816]: I0216 14:17:03.115827 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae1863f6ad1f58dea35a0261258a28e1a972d9434634e93f91f33ce4801ff44f" Feb 16 14:17:03 crc kubenswrapper[4816]: I0216 14:17:03.115440 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2lsns" Feb 16 14:17:05 crc kubenswrapper[4816]: I0216 14:17:05.332134 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:17:05 crc kubenswrapper[4816]: I0216 14:17:05.332524 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:17:05 crc kubenswrapper[4816]: I0216 14:17:05.408477 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:17:05 crc kubenswrapper[4816]: I0216 14:17:05.512910 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:17:05 crc kubenswrapper[4816]: I0216 14:17:05.513275 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:17:05 crc kubenswrapper[4816]: I0216 14:17:05.560569 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:17:06 crc kubenswrapper[4816]: I0216 14:17:06.180172 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:17:06 crc kubenswrapper[4816]: I0216 14:17:06.198794 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:17:07 crc kubenswrapper[4816]: I0216 14:17:07.056325 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pv24l"] Feb 16 14:17:08 crc kubenswrapper[4816]: I0216 14:17:08.151703 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pv24l" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerName="registry-server" containerID="cri-o://177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed" gracePeriod=2 Feb 16 14:17:08 crc kubenswrapper[4816]: I0216 14:17:08.450383 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cctdj"] Feb 16 14:17:08 crc 
kubenswrapper[4816]: I0216 14:17:08.450616 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cctdj" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="registry-server" containerID="cri-o://f64ee244b4478d1bd88a09821ac9e794670c626b63124f91a76a5d79307c3154" gracePeriod=2 Feb 16 14:17:08 crc kubenswrapper[4816]: I0216 14:17:08.984846 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.161846 4816 generic.go:334] "Generic (PLEG): container finished" podID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerID="177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed" exitCode=0 Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.161895 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pv24l" event={"ID":"f000f02e-2505-4b27-b0ed-6d5fb679b7d1","Type":"ContainerDied","Data":"177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed"} Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.161923 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pv24l" event={"ID":"f000f02e-2505-4b27-b0ed-6d5fb679b7d1","Type":"ContainerDied","Data":"00169a93241adc2232eb71f56682e454d8dd3fe09b3fb06f7d88728b55357009"} Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.161945 4816 scope.go:117] "RemoveContainer" containerID="177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.162065 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pv24l" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.175183 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-catalog-content\") pod \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.175257 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-utilities\") pod \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.175327 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d5sk\" (UniqueName: \"kubernetes.io/projected/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-kube-api-access-8d5sk\") pod \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\" (UID: \"f000f02e-2505-4b27-b0ed-6d5fb679b7d1\") " Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.177708 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-utilities" (OuterVolumeSpecName: "utilities") pod "f000f02e-2505-4b27-b0ed-6d5fb679b7d1" (UID: "f000f02e-2505-4b27-b0ed-6d5fb679b7d1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.177863 4816 scope.go:117] "RemoveContainer" containerID="46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.201965 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-kube-api-access-8d5sk" (OuterVolumeSpecName: "kube-api-access-8d5sk") pod "f000f02e-2505-4b27-b0ed-6d5fb679b7d1" (UID: "f000f02e-2505-4b27-b0ed-6d5fb679b7d1"). InnerVolumeSpecName "kube-api-access-8d5sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.242560 4816 scope.go:117] "RemoveContainer" containerID="04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.273705 4816 scope.go:117] "RemoveContainer" containerID="177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed" Feb 16 14:17:09 crc kubenswrapper[4816]: E0216 14:17:09.274192 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed\": container with ID starting with 177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed not found: ID does not exist" containerID="177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.274230 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed"} err="failed to get container status \"177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed\": rpc error: code = NotFound desc = could not find container \"177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed\": container with ID starting with 177bc2b60da1a2ca353a82eebf6601f8ba62ff185b8cdcbb99aa8dc70e0c18ed not found: ID does not exist" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.274256 4816 scope.go:117] "RemoveContainer" containerID="46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86" Feb 16 14:17:09 crc kubenswrapper[4816]: E0216 14:17:09.274731 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86\": container with ID starting with 46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86 not found: ID does not exist" containerID="46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.274777 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86"} err="failed to get container status \"46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86\": rpc error: code = NotFound desc = could not find container \"46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86\": container with ID starting with 46b9495cd9cffb38e53ad068180966dd9494484dcfd676f98c1aeca9bc6ffd86 not found: ID does not exist" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.274807 4816 scope.go:117] "RemoveContainer" containerID="04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce" Feb 16 14:17:09 crc 
kubenswrapper[4816]: E0216 14:17:09.275147 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce\": container with ID starting with 04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce not found: ID does not exist" containerID="04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.275184 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce"} err="failed to get container status \"04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce\": rpc error: code = NotFound desc = could not find container \"04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce\": container with ID starting with 04b17c0408388cc8074b97a8b21c634fcf3ed8a57da2e64b37b826a69a2ce7ce not found: ID does not exist" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.277101 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:09 crc kubenswrapper[4816]: I0216 14:17:09.277125 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d5sk\" (UniqueName: \"kubernetes.io/projected/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-kube-api-access-8d5sk\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:10 crc kubenswrapper[4816]: I0216 14:17:10.170009 4816 generic.go:334] "Generic (PLEG): container finished" podID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerID="f64ee244b4478d1bd88a09821ac9e794670c626b63124f91a76a5d79307c3154" exitCode=0 Feb 16 14:17:10 crc kubenswrapper[4816]: I0216 14:17:10.170067 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cctdj" event={"ID":"d51cc27b-afa3-40bd-a8d0-832145cc65d8","Type":"ContainerDied","Data":"f64ee244b4478d1bd88a09821ac9e794670c626b63124f91a76a5d79307c3154"} Feb 16 14:17:10 crc kubenswrapper[4816]: I0216 14:17:10.364754 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f000f02e-2505-4b27-b0ed-6d5fb679b7d1" (UID: "f000f02e-2505-4b27-b0ed-6d5fb679b7d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:17:10 crc kubenswrapper[4816]: I0216 14:17:10.393941 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f000f02e-2505-4b27-b0ed-6d5fb679b7d1-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:10 crc kubenswrapper[4816]: I0216 14:17:10.701037 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pv24l"] Feb 16 14:17:10 crc kubenswrapper[4816]: I0216 14:17:10.711070 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pv24l"] Feb 16 14:17:10 crc kubenswrapper[4816]: I0216 14:17:10.908830 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.000966 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-catalog-content\") pod \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.001028 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-utilities\") pod \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.001160 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txgtz\" (UniqueName: \"kubernetes.io/projected/d51cc27b-afa3-40bd-a8d0-832145cc65d8-kube-api-access-txgtz\") pod \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\" (UID: \"d51cc27b-afa3-40bd-a8d0-832145cc65d8\") " Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.001943 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-utilities" (OuterVolumeSpecName: "utilities") pod "d51cc27b-afa3-40bd-a8d0-832145cc65d8" (UID: "d51cc27b-afa3-40bd-a8d0-832145cc65d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.006549 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d51cc27b-afa3-40bd-a8d0-832145cc65d8-kube-api-access-txgtz" (OuterVolumeSpecName: "kube-api-access-txgtz") pod "d51cc27b-afa3-40bd-a8d0-832145cc65d8" (UID: "d51cc27b-afa3-40bd-a8d0-832145cc65d8"). InnerVolumeSpecName "kube-api-access-txgtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.027422 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d51cc27b-afa3-40bd-a8d0-832145cc65d8" (UID: "d51cc27b-afa3-40bd-a8d0-832145cc65d8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.102416 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txgtz\" (UniqueName: \"kubernetes.io/projected/d51cc27b-afa3-40bd-a8d0-832145cc65d8-kube-api-access-txgtz\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.102453 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.102463 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d51cc27b-afa3-40bd-a8d0-832145cc65d8-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.181295 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cctdj" event={"ID":"d51cc27b-afa3-40bd-a8d0-832145cc65d8","Type":"ContainerDied","Data":"5bcfb9e1d299672ed1f6796b45e06f329d93f16c57eb5bb946fd02c73e045645"} Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.181362 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cctdj" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.181386 4816 scope.go:117] "RemoveContainer" containerID="f64ee244b4478d1bd88a09821ac9e794670c626b63124f91a76a5d79307c3154" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.210389 4816 scope.go:117] "RemoveContainer" containerID="7b0fc5b504389c39030c1dcf8544b1558bf22ff7e756d224baa5b9a4df7aa336" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.219304 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cctdj"] Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.225622 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cctdj"] Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.231555 4816 scope.go:117] "RemoveContainer" containerID="0a0250651657986b481e5e58f81ea6b9d304ab90f6755fb15470001528d26794" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.408454 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" path="/var/lib/kubelet/pods/d51cc27b-afa3-40bd-a8d0-832145cc65d8/volumes" Feb 16 14:17:11 crc kubenswrapper[4816]: I0216 14:17:11.409373 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" path="/var/lib/kubelet/pods/f000f02e-2505-4b27-b0ed-6d5fb679b7d1/volumes" Feb 16 14:17:26 crc kubenswrapper[4816]: I0216 14:17:26.903032 4816 scope.go:117] "RemoveContainer" containerID="1b36b192d86353df7f3dd236e08170d9806e2046639e3b918c41edf8f3b3be91" Feb 16 14:18:36 crc kubenswrapper[4816]: I0216 14:18:36.941416 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:18:36 crc kubenswrapper[4816]: I0216 14:18:36.942105 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:19:06 crc kubenswrapper[4816]: I0216 14:19:06.941180 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:19:06 crc kubenswrapper[4816]: I0216 14:19:06.941870 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:19:36 crc kubenswrapper[4816]: I0216 14:19:36.940951 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:19:36 crc kubenswrapper[4816]: I0216 14:19:36.941522 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:19:36 crc kubenswrapper[4816]: I0216 14:19:36.941565 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:19:36 crc kubenswrapper[4816]: I0216 14:19:36.942225 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:19:36 crc kubenswrapper[4816]: I0216 14:19:36.942328 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" gracePeriod=600 Feb 16 14:19:37 crc kubenswrapper[4816]: E0216 14:19:37.071688 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:19:37 crc kubenswrapper[4816]: I0216 14:19:37.698024 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" exitCode=0 Feb 16 14:19:37 crc kubenswrapper[4816]: I0216 14:19:37.698080 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690"} Feb 16 14:19:37 crc kubenswrapper[4816]: I0216 14:19:37.698114 4816 scope.go:117] "RemoveContainer" containerID="caed64ee3964a4421120d568fb03c81370a76bf657975e0b221514bdb9b6cf0f" Feb 16 14:19:37 crc kubenswrapper[4816]: I0216 14:19:37.698650 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:19:37 crc kubenswrapper[4816]: E0216 14:19:37.698971 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:19:51 crc kubenswrapper[4816]: I0216 14:19:51.403522 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:19:51 crc kubenswrapper[4816]: E0216 14:19:51.404586 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:20:03 crc kubenswrapper[4816]: I0216 14:20:03.398568 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:20:03 crc kubenswrapper[4816]: E0216 14:20:03.399372 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.622645 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-2jflt"] Feb 16 14:20:14 crc kubenswrapper[4816]: E0216 14:20:14.623607 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerName="registry-server" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623627 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerName="registry-server" Feb 16 14:20:14 crc kubenswrapper[4816]: E0216 14:20:14.623671 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerName="extract-utilities" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623681 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerName="extract-utilities" Feb 16 14:20:14 crc kubenswrapper[4816]: E0216 14:20:14.623693 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" 
containerName="extract-content" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623702 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerName="extract-content" Feb 16 14:20:14 crc kubenswrapper[4816]: E0216 14:20:14.623717 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c8e2e91-3d6a-45ed-9112-bf2ae61164bf" containerName="storage" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623725 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c8e2e91-3d6a-45ed-9112-bf2ae61164bf" containerName="storage" Feb 16 14:20:14 crc kubenswrapper[4816]: E0216 14:20:14.623748 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="extract-utilities" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623756 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="extract-utilities" Feb 16 14:20:14 crc kubenswrapper[4816]: E0216 14:20:14.623768 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="extract-content" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623776 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="extract-content" Feb 16 14:20:14 crc kubenswrapper[4816]: E0216 14:20:14.623787 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="registry-server" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623794 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="registry-server" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.623988 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f000f02e-2505-4b27-b0ed-6d5fb679b7d1" containerName="registry-server" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.624013 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c8e2e91-3d6a-45ed-9112-bf2ae61164bf" containerName="storage" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.624029 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d51cc27b-afa3-40bd-a8d0-832145cc65d8" containerName="registry-server" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.624861 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.627420 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.627812 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.627704 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.628071 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-rnf44" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.628322 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.636365 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-2jflt"] Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.698936 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-config\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.699061 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.699170 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5ld4\" (UniqueName: \"kubernetes.io/projected/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-kube-api-access-m5ld4\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.800203 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.800307 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5ld4\" (UniqueName: \"kubernetes.io/projected/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-kube-api-access-m5ld4\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.800340 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-config\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.801068 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.801172 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-config\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.840809 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5ld4\" (UniqueName: \"kubernetes.io/projected/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-kube-api-access-m5ld4\") pod \"dnsmasq-dns-5d7b5456f5-2jflt\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.854208 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-7jzfh"] Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.857100 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.880376 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-7jzfh"] Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.901085 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.901166 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/94ab64de-1d03-4d78-a575-5427a0108eb0-kube-api-access-b6gkz\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.901189 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-config\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:14 crc kubenswrapper[4816]: I0216 14:20:14.941282 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.002945 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.003100 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/94ab64de-1d03-4d78-a575-5427a0108eb0-kube-api-access-b6gkz\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.003132 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-config\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.004156 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.004463 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-config\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.025093 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/94ab64de-1d03-4d78-a575-5427a0108eb0-kube-api-access-b6gkz\") pod \"dnsmasq-dns-98ddfc8f-7jzfh\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.193156 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.459086 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-2jflt"] Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.622781 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-7jzfh"] Feb 16 14:20:15 crc kubenswrapper[4816]: W0216 14:20:15.626185 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94ab64de_1d03_4d78_a575_5427a0108eb0.slice/crio-0604b545c7837991f9d75ef6037863a025560c765dd80972e7e9c607105bcc38 WatchSource:0}: Error finding container 0604b545c7837991f9d75ef6037863a025560c765dd80972e7e9c607105bcc38: Status 404 returned error can't find the container with id 0604b545c7837991f9d75ef6037863a025560c765dd80972e7e9c607105bcc38 Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.738636 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.740013 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.741565 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-9zgn7" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.742020 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.742306 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.742463 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.744766 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.761415 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.914406 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz225\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-kube-api-access-rz225\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.914885 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc63654c-c1e8-4470-8345-57e5a576d246-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.914942 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.914972 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc63654c-c1e8-4470-8345-57e5a576d246-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.915038 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.915088 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.915119 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.915250 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:15 crc kubenswrapper[4816]: I0216 14:20:15.915474 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.016923 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.016997 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz225\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-kube-api-access-rz225\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017021 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc63654c-c1e8-4470-8345-57e5a576d246-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017048 4816 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017069 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc63654c-c1e8-4470-8345-57e5a576d246-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017086 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017113 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017133 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017154 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.017767 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.018220 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.018294 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.019225 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc 
kubenswrapper[4816]: I0216 14:20:16.022544 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.022604 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a5678448aeb9611164dd6454bd92d995887f86f00f3350ffea9ae651dc862e2c/globalmount\"" pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.023487 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc63654c-c1e8-4470-8345-57e5a576d246-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.023594 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.024751 4816 generic.go:334] "Generic (PLEG): container finished" podID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerID="406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a" exitCode=0 Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.024988 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" event={"ID":"cf17fd2b-93cf-46fd-8ef5-1237eb707b64","Type":"ContainerDied","Data":"406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a"} Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.025135 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" event={"ID":"cf17fd2b-93cf-46fd-8ef5-1237eb707b64","Type":"ContainerStarted","Data":"26f53e0eba41e5f2d564b854a0d2dcc168fd99f35ad6ec6c4daa0ac66fc127ef"} Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.025404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc63654c-c1e8-4470-8345-57e5a576d246-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.030242 4816 generic.go:334] "Generic (PLEG): container finished" podID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerID="dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca" exitCode=0 Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.030290 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" event={"ID":"94ab64de-1d03-4d78-a575-5427a0108eb0","Type":"ContainerDied","Data":"dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca"} Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.030320 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" 
event={"ID":"94ab64de-1d03-4d78-a575-5427a0108eb0","Type":"ContainerStarted","Data":"0604b545c7837991f9d75ef6037863a025560c765dd80972e7e9c607105bcc38"} Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.031834 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.035922 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.040673 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.040847 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.040871 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.041006 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.041043 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-985r5" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.054950 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.058517 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz225\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-kube-api-access-rz225\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.080023 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.219775 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtff7\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-kube-api-access-gtff7\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.220041 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.220157 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: 
I0216 14:20:16.220265 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bdb83503-25de-4204-b73d-b1c5fc3510d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.220375 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.220462 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.221049 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.221488 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.221764 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bdb83503-25de-4204-b73d-b1c5fc3510d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323146 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323234 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323303 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " 
pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323333 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bdb83503-25de-4204-b73d-b1c5fc3510d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323384 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtff7\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-kube-api-access-gtff7\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323407 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323425 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323464 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bdb83503-25de-4204-b73d-b1c5fc3510d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.323490 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.324221 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.324263 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.324697 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.324962 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.326906 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.326933 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2144d1a623b131c734fb9987db4b6b56c2cd53bd1708342ff09b0e1feffecd8a/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.327696 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.327713 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bdb83503-25de-4204-b73d-b1c5fc3510d7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.328252 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bdb83503-25de-4204-b73d-b1c5fc3510d7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.343188 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtff7\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-kube-api-access-gtff7\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.356283 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.372426 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.510829 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:16 crc kubenswrapper[4816]: W0216 14:20:16.786530 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc63654c_c1e8_4470_8345_57e5a576d246.slice/crio-e62e757917a91d9ce1dfb832fdda1327d26f74b03d739033c979e451c10f50bb WatchSource:0}: Error finding container e62e757917a91d9ce1dfb832fdda1327d26f74b03d739033c979e451c10f50bb: Status 404 returned error can't find the container with id e62e757917a91d9ce1dfb832fdda1327d26f74b03d739033c979e451c10f50bb Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.787139 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 14:20:16 crc kubenswrapper[4816]: I0216 14:20:16.950066 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.038043 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" event={"ID":"94ab64de-1d03-4d78-a575-5427a0108eb0","Type":"ContainerStarted","Data":"b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53"} Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.038188 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.039709 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cc63654c-c1e8-4470-8345-57e5a576d246","Type":"ContainerStarted","Data":"e62e757917a91d9ce1dfb832fdda1327d26f74b03d739033c979e451c10f50bb"} Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.040679 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bdb83503-25de-4204-b73d-b1c5fc3510d7","Type":"ContainerStarted","Data":"85356b904c74845a61416114e504f57855cbe892fc4cafb492d815804b2a9c1d"} Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.042403 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" event={"ID":"cf17fd2b-93cf-46fd-8ef5-1237eb707b64","Type":"ContainerStarted","Data":"0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5"} Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.042557 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.057685 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" podStartSLOduration=3.057666806 podStartE2EDuration="3.057666806s" podCreationTimestamp="2026-02-16 14:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:17.051667023 +0000 UTC m=+4616.378380751" watchObservedRunningTime="2026-02-16 14:20:17.057666806 +0000 UTC m=+4616.384380534" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.069632 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" podStartSLOduration=3.069601443 podStartE2EDuration="3.069601443s" podCreationTimestamp="2026-02-16 14:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:17.068230465 +0000 UTC 
m=+4616.394944193" watchObservedRunningTime="2026-02-16 14:20:17.069601443 +0000 UTC m=+4616.396315171" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.219207 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.220726 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.222983 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.223298 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.223791 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-c654x" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.224049 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.240632 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.244402 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.342722 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.342876 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-config-data-default\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.342939 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvz56\" (UniqueName: \"kubernetes.io/projected/8fbda533-421c-4e67-8f65-4970f0c27924-kube-api-access-rvz56\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.342982 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbda533-421c-4e67-8f65-4970f0c27924-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.343057 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-kolla-config\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.343085 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbda533-421c-4e67-8f65-4970f0c27924-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.343122 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8fbda533-421c-4e67-8f65-4970f0c27924-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.343148 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.399230 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:20:17 crc kubenswrapper[4816]: E0216 14:20:17.399612 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444082 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-config-data-default\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444144 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvz56\" (UniqueName: \"kubernetes.io/projected/8fbda533-421c-4e67-8f65-4970f0c27924-kube-api-access-rvz56\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444177 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbda533-421c-4e67-8f65-4970f0c27924-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444215 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-kolla-config\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444236 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbda533-421c-4e67-8f65-4970f0c27924-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: 
\"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444264 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8fbda533-421c-4e67-8f65-4970f0c27924-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444285 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.444305 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.445173 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8fbda533-421c-4e67-8f65-4970f0c27924-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.445513 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-config-data-default\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.445552 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-kolla-config\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.445944 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fbda533-421c-4e67-8f65-4970f0c27924-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.449430 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbda533-421c-4e67-8f65-4970f0c27924-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.449560 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbda533-421c-4e67-8f65-4970f0c27924-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.450135 4816 csi_attacher.go:380] kubernetes.io/csi: 
attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.450174 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d15158989f58d4ad04f8f55dcfad2175fcf83d758bf212760ab3857cf149d37b/globalmount\"" pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.466549 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvz56\" (UniqueName: \"kubernetes.io/projected/8fbda533-421c-4e67-8f65-4970f0c27924-kube-api-access-rvz56\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.487680 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca4156f2-5415-4d7e-9a72-076ed8976ee0\") pod \"openstack-galera-0\" (UID: \"8fbda533-421c-4e67-8f65-4970f0c27924\") " pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.514326 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.515259 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.517186 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.517682 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-4qx6k" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.530822 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.541899 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.647053 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/16de8506-0f12-4daa-84b8-3cdd03d266a8-kolla-config\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.647179 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16de8506-0f12-4daa-84b8-3cdd03d266a8-config-data\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.647213 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvhth\" (UniqueName: \"kubernetes.io/projected/16de8506-0f12-4daa-84b8-3cdd03d266a8-kube-api-access-rvhth\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.748691 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/16de8506-0f12-4daa-84b8-3cdd03d266a8-kolla-config\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.748802 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16de8506-0f12-4daa-84b8-3cdd03d266a8-config-data\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.748834 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvhth\" (UniqueName: \"kubernetes.io/projected/16de8506-0f12-4daa-84b8-3cdd03d266a8-kube-api-access-rvhth\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.749535 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/16de8506-0f12-4daa-84b8-3cdd03d266a8-kolla-config\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.749564 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16de8506-0f12-4daa-84b8-3cdd03d266a8-config-data\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.764693 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvhth\" (UniqueName: \"kubernetes.io/projected/16de8506-0f12-4daa-84b8-3cdd03d266a8-kube-api-access-rvhth\") pod \"memcached-0\" (UID: \"16de8506-0f12-4daa-84b8-3cdd03d266a8\") " pod="openstack/memcached-0" Feb 16 14:20:17 crc kubenswrapper[4816]: I0216 14:20:17.831760 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.056411 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 16 14:20:18 crc kubenswrapper[4816]: W0216 14:20:18.106571 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fbda533_421c_4e67_8f65_4970f0c27924.slice/crio-e592203ce63ec56b208912b580a56792d85509be3368958632fbb122dd259357 WatchSource:0}: Error finding container e592203ce63ec56b208912b580a56792d85509be3368958632fbb122dd259357: Status 404 returned error can't find the container with id e592203ce63ec56b208912b580a56792d85509be3368958632fbb122dd259357 Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.501563 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 16 14:20:18 crc kubenswrapper[4816]: W0216 14:20:18.503136 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16de8506_0f12_4daa_84b8_3cdd03d266a8.slice/crio-54181b01bd5f5fa1d9ef82e9a96f9186913bcbc2b4b98466ff9405d77bdb6cf5 WatchSource:0}: Error finding container 54181b01bd5f5fa1d9ef82e9a96f9186913bcbc2b4b98466ff9405d77bdb6cf5: Status 404 returned error can't find the container with id 54181b01bd5f5fa1d9ef82e9a96f9186913bcbc2b4b98466ff9405d77bdb6cf5 Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.699466 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.700628 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.703061 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-zpdr9" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.703285 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.703335 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.703480 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.721460 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862445 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862517 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862540 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862601 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862750 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862816 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862873 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.862949 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf7cv\" (UniqueName: \"kubernetes.io/projected/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-kube-api-access-qf7cv\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964448 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964508 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964531 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 
14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964589 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964616 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964640 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.964696 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf7cv\" (UniqueName: \"kubernetes.io/projected/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-kube-api-access-qf7cv\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.965352 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.965530 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.965901 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.966220 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.967444 
4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.967614 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f15189cfa1988994399ac2199e5d1427633b4f2880eaec0490d232858b1e3b36/globalmount\"" pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.970011 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.971791 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.983272 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf7cv\" (UniqueName: \"kubernetes.io/projected/2152ebde-94a5-401a-a9c0-f2ca76c5a16e-kube-api-access-qf7cv\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:18 crc kubenswrapper[4816]: I0216 14:20:18.993167 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-60ddb1ad-eced-4e8b-9e8c-4ce495562bce\") pod \"openstack-cell1-galera-0\" (UID: \"2152ebde-94a5-401a-a9c0-f2ca76c5a16e\") " pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.058529 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cc63654c-c1e8-4470-8345-57e5a576d246","Type":"ContainerStarted","Data":"d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4"} Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.059875 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bdb83503-25de-4204-b73d-b1c5fc3510d7","Type":"ContainerStarted","Data":"a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344"} Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.062062 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"16de8506-0f12-4daa-84b8-3cdd03d266a8","Type":"ContainerStarted","Data":"d4ddf1311e76c224b6109a3b96dba0108a84f9f4cc3645f7cd850e35e634fa1a"} Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.062119 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"16de8506-0f12-4daa-84b8-3cdd03d266a8","Type":"ContainerStarted","Data":"54181b01bd5f5fa1d9ef82e9a96f9186913bcbc2b4b98466ff9405d77bdb6cf5"} Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.062221 
4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.063580 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8fbda533-421c-4e67-8f65-4970f0c27924","Type":"ContainerStarted","Data":"bef5f8886cfa9f910deaacbf95a5f7d4d73d0c01be2618743c04407419177211"} Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.063621 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8fbda533-421c-4e67-8f65-4970f0c27924","Type":"ContainerStarted","Data":"e592203ce63ec56b208912b580a56792d85509be3368958632fbb122dd259357"} Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.070463 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.105110 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.105085791 podStartE2EDuration="2.105085791s" podCreationTimestamp="2026-02-16 14:20:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:19.10027064 +0000 UTC m=+4618.426984368" watchObservedRunningTime="2026-02-16 14:20:19.105085791 +0000 UTC m=+4618.431799529" Feb 16 14:20:19 crc kubenswrapper[4816]: W0216 14:20:19.527212 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2152ebde_94a5_401a_a9c0_f2ca76c5a16e.slice/crio-b419ffeb0ecc337b9e2a2902e88a970948a5a7e444d23db469d8d15e5cd90121 WatchSource:0}: Error finding container b419ffeb0ecc337b9e2a2902e88a970948a5a7e444d23db469d8d15e5cd90121: Status 404 returned error can't find the container with id b419ffeb0ecc337b9e2a2902e88a970948a5a7e444d23db469d8d15e5cd90121 Feb 16 14:20:19 crc kubenswrapper[4816]: I0216 14:20:19.528517 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 16 14:20:20 crc kubenswrapper[4816]: I0216 14:20:20.072268 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2152ebde-94a5-401a-a9c0-f2ca76c5a16e","Type":"ContainerStarted","Data":"8ccbf4f51b7e7ab4530d8260caaa9673fb0aa778a19698ce9fe4059522c5e626"} Feb 16 14:20:20 crc kubenswrapper[4816]: I0216 14:20:20.072310 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2152ebde-94a5-401a-a9c0-f2ca76c5a16e","Type":"ContainerStarted","Data":"b419ffeb0ecc337b9e2a2902e88a970948a5a7e444d23db469d8d15e5cd90121"} Feb 16 14:20:22 crc kubenswrapper[4816]: I0216 14:20:22.095088 4816 generic.go:334] "Generic (PLEG): container finished" podID="8fbda533-421c-4e67-8f65-4970f0c27924" containerID="bef5f8886cfa9f910deaacbf95a5f7d4d73d0c01be2618743c04407419177211" exitCode=0 Feb 16 14:20:22 crc kubenswrapper[4816]: I0216 14:20:22.096362 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8fbda533-421c-4e67-8f65-4970f0c27924","Type":"ContainerDied","Data":"bef5f8886cfa9f910deaacbf95a5f7d4d73d0c01be2618743c04407419177211"} Feb 16 14:20:23 crc kubenswrapper[4816]: I0216 14:20:23.107462 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"8fbda533-421c-4e67-8f65-4970f0c27924","Type":"ContainerStarted","Data":"7b24154e287e37c331f0ecbdc0922354e4035da88fa2ff69f61336ccebf205ab"} Feb 16 14:20:23 crc kubenswrapper[4816]: I0216 14:20:23.110136 4816 generic.go:334] "Generic (PLEG): container finished" podID="2152ebde-94a5-401a-a9c0-f2ca76c5a16e" containerID="8ccbf4f51b7e7ab4530d8260caaa9673fb0aa778a19698ce9fe4059522c5e626" exitCode=0 Feb 16 14:20:23 crc kubenswrapper[4816]: I0216 14:20:23.110203 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2152ebde-94a5-401a-a9c0-f2ca76c5a16e","Type":"ContainerDied","Data":"8ccbf4f51b7e7ab4530d8260caaa9673fb0aa778a19698ce9fe4059522c5e626"} Feb 16 14:20:23 crc kubenswrapper[4816]: I0216 14:20:23.153161 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.153128735 podStartE2EDuration="7.153128735s" podCreationTimestamp="2026-02-16 14:20:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:23.137242581 +0000 UTC m=+4622.463956319" watchObservedRunningTime="2026-02-16 14:20:23.153128735 +0000 UTC m=+4622.479842543" Feb 16 14:20:24 crc kubenswrapper[4816]: I0216 14:20:24.122986 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2152ebde-94a5-401a-a9c0-f2ca76c5a16e","Type":"ContainerStarted","Data":"1625210da39fffc69215efd5d61d7db8c5390450a49aa6dfd83166ed2edf3b50"} Feb 16 14:20:24 crc kubenswrapper[4816]: I0216 14:20:24.155768 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.155741822 podStartE2EDuration="7.155741822s" podCreationTimestamp="2026-02-16 14:20:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:24.147064626 +0000 UTC m=+4623.473778394" watchObservedRunningTime="2026-02-16 14:20:24.155741822 +0000 UTC m=+4623.482455590" Feb 16 14:20:24 crc kubenswrapper[4816]: I0216 14:20:24.943926 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.194461 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.245536 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-2jflt"] Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.245832 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" podUID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerName="dnsmasq-dns" containerID="cri-o://0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5" gracePeriod=10 Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.659532 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.771741 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-config\") pod \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.771886 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5ld4\" (UniqueName: \"kubernetes.io/projected/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-kube-api-access-m5ld4\") pod \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.771945 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-dns-svc\") pod \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\" (UID: \"cf17fd2b-93cf-46fd-8ef5-1237eb707b64\") " Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.776996 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-kube-api-access-m5ld4" (OuterVolumeSpecName: "kube-api-access-m5ld4") pod "cf17fd2b-93cf-46fd-8ef5-1237eb707b64" (UID: "cf17fd2b-93cf-46fd-8ef5-1237eb707b64"). InnerVolumeSpecName "kube-api-access-m5ld4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.804422 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-config" (OuterVolumeSpecName: "config") pod "cf17fd2b-93cf-46fd-8ef5-1237eb707b64" (UID: "cf17fd2b-93cf-46fd-8ef5-1237eb707b64"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.804527 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cf17fd2b-93cf-46fd-8ef5-1237eb707b64" (UID: "cf17fd2b-93cf-46fd-8ef5-1237eb707b64"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.873532 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5ld4\" (UniqueName: \"kubernetes.io/projected/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-kube-api-access-m5ld4\") on node \"crc\" DevicePath \"\"" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.873562 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:20:25 crc kubenswrapper[4816]: I0216 14:20:25.873573 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf17fd2b-93cf-46fd-8ef5-1237eb707b64-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.144736 4816 generic.go:334] "Generic (PLEG): container finished" podID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerID="0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5" exitCode=0 Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.144810 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" event={"ID":"cf17fd2b-93cf-46fd-8ef5-1237eb707b64","Type":"ContainerDied","Data":"0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5"} Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.144870 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" event={"ID":"cf17fd2b-93cf-46fd-8ef5-1237eb707b64","Type":"ContainerDied","Data":"26f53e0eba41e5f2d564b854a0d2dcc168fd99f35ad6ec6c4daa0ac66fc127ef"} Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.144861 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-2jflt" Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.144895 4816 scope.go:117] "RemoveContainer" containerID="0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5" Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.174459 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-2jflt"] Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.175876 4816 scope.go:117] "RemoveContainer" containerID="406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a" Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.179982 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-2jflt"] Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.200180 4816 scope.go:117] "RemoveContainer" containerID="0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5" Feb 16 14:20:26 crc kubenswrapper[4816]: E0216 14:20:26.201778 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5\": container with ID starting with 0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5 not found: ID does not exist" containerID="0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5" Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.201833 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5"} err="failed to get container status \"0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5\": rpc error: code = NotFound desc = could not find container \"0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5\": container with ID starting with 0b241ac8a350c840cfd41a00773e7bac0d091b7bfe484f537aaf0fa2c6bc7cb5 not found: ID does not exist" Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.201869 4816 scope.go:117] "RemoveContainer" containerID="406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a" Feb 16 14:20:26 crc kubenswrapper[4816]: E0216 14:20:26.202300 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a\": container with ID starting with 406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a not found: ID does not exist" containerID="406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a" Feb 16 14:20:26 crc kubenswrapper[4816]: I0216 14:20:26.202347 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a"} err="failed to get container status \"406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a\": rpc error: code = NotFound desc = could not find container \"406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a\": container with ID starting with 406fd71b3a090510b8477b7801031acb40744ee7cd1651e1aadf1aa83629053a not found: ID does not exist" Feb 16 14:20:27 crc kubenswrapper[4816]: I0216 14:20:27.415137 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" path="/var/lib/kubelet/pods/cf17fd2b-93cf-46fd-8ef5-1237eb707b64/volumes" Feb 16 14:20:27 crc kubenswrapper[4816]: I0216 14:20:27.542350 4816 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 16 14:20:27 crc kubenswrapper[4816]: I0216 14:20:27.542872 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 16 14:20:27 crc kubenswrapper[4816]: I0216 14:20:27.833265 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 16 14:20:29 crc kubenswrapper[4816]: I0216 14:20:29.071407 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:29 crc kubenswrapper[4816]: I0216 14:20:29.071470 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:30 crc kubenswrapper[4816]: I0216 14:20:30.098942 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 16 14:20:30 crc kubenswrapper[4816]: I0216 14:20:30.198136 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 16 14:20:30 crc kubenswrapper[4816]: I0216 14:20:30.398640 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:20:30 crc kubenswrapper[4816]: E0216 14:20:30.398923 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:20:31 crc kubenswrapper[4816]: I0216 14:20:31.493419 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:31 crc kubenswrapper[4816]: I0216 14:20:31.589433 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.185891 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-xc5bb"] Feb 16 14:20:36 crc kubenswrapper[4816]: E0216 14:20:36.186762 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerName="dnsmasq-dns" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.186778 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerName="dnsmasq-dns" Feb 16 14:20:36 crc kubenswrapper[4816]: E0216 14:20:36.186797 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerName="init" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.186805 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerName="init" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.186958 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf17fd2b-93cf-46fd-8ef5-1237eb707b64" containerName="dnsmasq-dns" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.187544 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.189199 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.198847 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-xc5bb"] Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.249241 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/945242e2-8f31-48b9-8017-84016c4d7b7a-operator-scripts\") pod \"root-account-create-update-xc5bb\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.249308 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6js5\" (UniqueName: \"kubernetes.io/projected/945242e2-8f31-48b9-8017-84016c4d7b7a-kube-api-access-t6js5\") pod \"root-account-create-update-xc5bb\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.350435 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/945242e2-8f31-48b9-8017-84016c4d7b7a-operator-scripts\") pod \"root-account-create-update-xc5bb\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.350490 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6js5\" (UniqueName: \"kubernetes.io/projected/945242e2-8f31-48b9-8017-84016c4d7b7a-kube-api-access-t6js5\") pod \"root-account-create-update-xc5bb\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.351538 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/945242e2-8f31-48b9-8017-84016c4d7b7a-operator-scripts\") pod \"root-account-create-update-xc5bb\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.372892 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6js5\" (UniqueName: \"kubernetes.io/projected/945242e2-8f31-48b9-8017-84016c4d7b7a-kube-api-access-t6js5\") pod \"root-account-create-update-xc5bb\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.509327 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:36 crc kubenswrapper[4816]: I0216 14:20:36.962284 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-xc5bb"] Feb 16 14:20:37 crc kubenswrapper[4816]: I0216 14:20:37.237328 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-xc5bb" event={"ID":"945242e2-8f31-48b9-8017-84016c4d7b7a","Type":"ContainerStarted","Data":"1a07e5feff28041a7114033461c1b20ec881ef1f93bfa1c4b6e6a28d50259e01"} Feb 16 14:20:37 crc kubenswrapper[4816]: I0216 14:20:37.237704 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-xc5bb" event={"ID":"945242e2-8f31-48b9-8017-84016c4d7b7a","Type":"ContainerStarted","Data":"964c75b52ce50d3b258ab101d8ace7d0d8b7def77d46902c5aabb20d5d54ee38"} Feb 16 14:20:37 crc kubenswrapper[4816]: I0216 14:20:37.258120 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-xc5bb" podStartSLOduration=1.2580964190000001 podStartE2EDuration="1.258096419s" podCreationTimestamp="2026-02-16 14:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:37.248512958 +0000 UTC m=+4636.575226686" watchObservedRunningTime="2026-02-16 14:20:37.258096419 +0000 UTC m=+4636.584810147" Feb 16 14:20:38 crc kubenswrapper[4816]: I0216 14:20:38.244220 4816 generic.go:334] "Generic (PLEG): container finished" podID="945242e2-8f31-48b9-8017-84016c4d7b7a" containerID="1a07e5feff28041a7114033461c1b20ec881ef1f93bfa1c4b6e6a28d50259e01" exitCode=0 Feb 16 14:20:38 crc kubenswrapper[4816]: I0216 14:20:38.244280 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-xc5bb" event={"ID":"945242e2-8f31-48b9-8017-84016c4d7b7a","Type":"ContainerDied","Data":"1a07e5feff28041a7114033461c1b20ec881ef1f93bfa1c4b6e6a28d50259e01"} Feb 16 14:20:39 crc kubenswrapper[4816]: I0216 14:20:39.658692 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:39 crc kubenswrapper[4816]: I0216 14:20:39.727375 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6js5\" (UniqueName: \"kubernetes.io/projected/945242e2-8f31-48b9-8017-84016c4d7b7a-kube-api-access-t6js5\") pod \"945242e2-8f31-48b9-8017-84016c4d7b7a\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " Feb 16 14:20:39 crc kubenswrapper[4816]: I0216 14:20:39.727528 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/945242e2-8f31-48b9-8017-84016c4d7b7a-operator-scripts\") pod \"945242e2-8f31-48b9-8017-84016c4d7b7a\" (UID: \"945242e2-8f31-48b9-8017-84016c4d7b7a\") " Feb 16 14:20:39 crc kubenswrapper[4816]: I0216 14:20:39.728730 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/945242e2-8f31-48b9-8017-84016c4d7b7a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "945242e2-8f31-48b9-8017-84016c4d7b7a" (UID: "945242e2-8f31-48b9-8017-84016c4d7b7a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:20:39 crc kubenswrapper[4816]: I0216 14:20:39.734858 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/945242e2-8f31-48b9-8017-84016c4d7b7a-kube-api-access-t6js5" (OuterVolumeSpecName: "kube-api-access-t6js5") pod "945242e2-8f31-48b9-8017-84016c4d7b7a" (UID: "945242e2-8f31-48b9-8017-84016c4d7b7a"). InnerVolumeSpecName "kube-api-access-t6js5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:20:39 crc kubenswrapper[4816]: I0216 14:20:39.829784 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6js5\" (UniqueName: \"kubernetes.io/projected/945242e2-8f31-48b9-8017-84016c4d7b7a-kube-api-access-t6js5\") on node \"crc\" DevicePath \"\"" Feb 16 14:20:39 crc kubenswrapper[4816]: I0216 14:20:39.829822 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/945242e2-8f31-48b9-8017-84016c4d7b7a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:20:40 crc kubenswrapper[4816]: I0216 14:20:40.261258 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-xc5bb" event={"ID":"945242e2-8f31-48b9-8017-84016c4d7b7a","Type":"ContainerDied","Data":"964c75b52ce50d3b258ab101d8ace7d0d8b7def77d46902c5aabb20d5d54ee38"} Feb 16 14:20:40 crc kubenswrapper[4816]: I0216 14:20:40.261306 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="964c75b52ce50d3b258ab101d8ace7d0d8b7def77d46902c5aabb20d5d54ee38" Feb 16 14:20:40 crc kubenswrapper[4816]: I0216 14:20:40.261362 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-xc5bb" Feb 16 14:20:42 crc kubenswrapper[4816]: I0216 14:20:42.398765 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:20:42 crc kubenswrapper[4816]: E0216 14:20:42.399358 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:20:42 crc kubenswrapper[4816]: I0216 14:20:42.699979 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-xc5bb"] Feb 16 14:20:42 crc kubenswrapper[4816]: I0216 14:20:42.709579 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-xc5bb"] Feb 16 14:20:43 crc kubenswrapper[4816]: I0216 14:20:43.412110 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="945242e2-8f31-48b9-8017-84016c4d7b7a" path="/var/lib/kubelet/pods/945242e2-8f31-48b9-8017-84016c4d7b7a/volumes" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.682547 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-zkvxp"] Feb 16 14:20:47 crc kubenswrapper[4816]: E0216 14:20:47.682992 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="945242e2-8f31-48b9-8017-84016c4d7b7a" containerName="mariadb-account-create-update" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.683017 4816 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="945242e2-8f31-48b9-8017-84016c4d7b7a" containerName="mariadb-account-create-update" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.683183 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="945242e2-8f31-48b9-8017-84016c4d7b7a" containerName="mariadb-account-create-update" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.683714 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.685998 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.695697 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-zkvxp"] Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.815698 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8hfk\" (UniqueName: \"kubernetes.io/projected/d65c1feb-6642-43db-8a3d-35330f524335-kube-api-access-b8hfk\") pod \"root-account-create-update-zkvxp\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.815771 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d65c1feb-6642-43db-8a3d-35330f524335-operator-scripts\") pod \"root-account-create-update-zkvxp\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.916822 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d65c1feb-6642-43db-8a3d-35330f524335-operator-scripts\") pod \"root-account-create-update-zkvxp\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.916950 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8hfk\" (UniqueName: \"kubernetes.io/projected/d65c1feb-6642-43db-8a3d-35330f524335-kube-api-access-b8hfk\") pod \"root-account-create-update-zkvxp\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.917753 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d65c1feb-6642-43db-8a3d-35330f524335-operator-scripts\") pod \"root-account-create-update-zkvxp\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:47 crc kubenswrapper[4816]: I0216 14:20:47.935563 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8hfk\" (UniqueName: \"kubernetes.io/projected/d65c1feb-6642-43db-8a3d-35330f524335-kube-api-access-b8hfk\") pod \"root-account-create-update-zkvxp\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:48 crc kubenswrapper[4816]: I0216 14:20:48.010205 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:48 crc kubenswrapper[4816]: I0216 14:20:48.425472 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-zkvxp"] Feb 16 14:20:48 crc kubenswrapper[4816]: W0216 14:20:48.608323 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd65c1feb_6642_43db_8a3d_35330f524335.slice/crio-dd486657418fd61279378caa38dc64629ebd424ecf1ee7671257685c5c7c8b1e WatchSource:0}: Error finding container dd486657418fd61279378caa38dc64629ebd424ecf1ee7671257685c5c7c8b1e: Status 404 returned error can't find the container with id dd486657418fd61279378caa38dc64629ebd424ecf1ee7671257685c5c7c8b1e Feb 16 14:20:49 crc kubenswrapper[4816]: I0216 14:20:49.343441 4816 generic.go:334] "Generic (PLEG): container finished" podID="d65c1feb-6642-43db-8a3d-35330f524335" containerID="39f3a936c2cd03eeca933b81d4eb7ea9cf415d3b66d8a3beebe551cb3bdc8266" exitCode=0 Feb 16 14:20:49 crc kubenswrapper[4816]: I0216 14:20:49.343482 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zkvxp" event={"ID":"d65c1feb-6642-43db-8a3d-35330f524335","Type":"ContainerDied","Data":"39f3a936c2cd03eeca933b81d4eb7ea9cf415d3b66d8a3beebe551cb3bdc8266"} Feb 16 14:20:49 crc kubenswrapper[4816]: I0216 14:20:49.343523 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zkvxp" event={"ID":"d65c1feb-6642-43db-8a3d-35330f524335","Type":"ContainerStarted","Data":"dd486657418fd61279378caa38dc64629ebd424ecf1ee7671257685c5c7c8b1e"} Feb 16 14:20:50 crc kubenswrapper[4816]: I0216 14:20:50.730034 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:50 crc kubenswrapper[4816]: I0216 14:20:50.889865 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d65c1feb-6642-43db-8a3d-35330f524335-operator-scripts\") pod \"d65c1feb-6642-43db-8a3d-35330f524335\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " Feb 16 14:20:50 crc kubenswrapper[4816]: I0216 14:20:50.890352 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8hfk\" (UniqueName: \"kubernetes.io/projected/d65c1feb-6642-43db-8a3d-35330f524335-kube-api-access-b8hfk\") pod \"d65c1feb-6642-43db-8a3d-35330f524335\" (UID: \"d65c1feb-6642-43db-8a3d-35330f524335\") " Feb 16 14:20:50 crc kubenswrapper[4816]: I0216 14:20:50.890505 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d65c1feb-6642-43db-8a3d-35330f524335-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d65c1feb-6642-43db-8a3d-35330f524335" (UID: "d65c1feb-6642-43db-8a3d-35330f524335"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:20:50 crc kubenswrapper[4816]: I0216 14:20:50.890685 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d65c1feb-6642-43db-8a3d-35330f524335-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:20:50 crc kubenswrapper[4816]: I0216 14:20:50.895360 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d65c1feb-6642-43db-8a3d-35330f524335-kube-api-access-b8hfk" (OuterVolumeSpecName: "kube-api-access-b8hfk") pod "d65c1feb-6642-43db-8a3d-35330f524335" (UID: "d65c1feb-6642-43db-8a3d-35330f524335"). InnerVolumeSpecName "kube-api-access-b8hfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:20:50 crc kubenswrapper[4816]: I0216 14:20:50.992492 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8hfk\" (UniqueName: \"kubernetes.io/projected/d65c1feb-6642-43db-8a3d-35330f524335-kube-api-access-b8hfk\") on node \"crc\" DevicePath \"\"" Feb 16 14:20:51 crc kubenswrapper[4816]: I0216 14:20:51.376737 4816 generic.go:334] "Generic (PLEG): container finished" podID="cc63654c-c1e8-4470-8345-57e5a576d246" containerID="d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4" exitCode=0 Feb 16 14:20:51 crc kubenswrapper[4816]: I0216 14:20:51.376847 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cc63654c-c1e8-4470-8345-57e5a576d246","Type":"ContainerDied","Data":"d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4"} Feb 16 14:20:51 crc kubenswrapper[4816]: I0216 14:20:51.378923 4816 generic.go:334] "Generic (PLEG): container finished" podID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerID="a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344" exitCode=0 Feb 16 14:20:51 crc kubenswrapper[4816]: I0216 14:20:51.379008 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bdb83503-25de-4204-b73d-b1c5fc3510d7","Type":"ContainerDied","Data":"a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344"} Feb 16 14:20:51 crc kubenswrapper[4816]: I0216 14:20:51.382237 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-zkvxp" event={"ID":"d65c1feb-6642-43db-8a3d-35330f524335","Type":"ContainerDied","Data":"dd486657418fd61279378caa38dc64629ebd424ecf1ee7671257685c5c7c8b1e"} Feb 16 14:20:51 crc kubenswrapper[4816]: I0216 14:20:51.382299 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-zkvxp" Feb 16 14:20:51 crc kubenswrapper[4816]: I0216 14:20:51.382307 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd486657418fd61279378caa38dc64629ebd424ecf1ee7671257685c5c7c8b1e" Feb 16 14:20:52 crc kubenswrapper[4816]: I0216 14:20:52.391998 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cc63654c-c1e8-4470-8345-57e5a576d246","Type":"ContainerStarted","Data":"5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae"} Feb 16 14:20:52 crc kubenswrapper[4816]: I0216 14:20:52.394322 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bdb83503-25de-4204-b73d-b1c5fc3510d7","Type":"ContainerStarted","Data":"05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab"} Feb 16 14:20:52 crc kubenswrapper[4816]: I0216 14:20:52.394551 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:20:52 crc kubenswrapper[4816]: I0216 14:20:52.418374 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.418350406 podStartE2EDuration="38.418350406s" podCreationTimestamp="2026-02-16 14:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:52.416184017 +0000 UTC m=+4651.742897745" watchObservedRunningTime="2026-02-16 14:20:52.418350406 +0000 UTC m=+4651.745064134" Feb 16 14:20:52 crc kubenswrapper[4816]: I0216 14:20:52.442806 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.442782573 podStartE2EDuration="38.442782573s" podCreationTimestamp="2026-02-16 14:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:20:52.440501661 +0000 UTC m=+4651.767215399" watchObservedRunningTime="2026-02-16 14:20:52.442782573 +0000 UTC m=+4651.769496311" Feb 16 14:20:56 crc kubenswrapper[4816]: I0216 14:20:56.373639 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 16 14:20:57 crc kubenswrapper[4816]: I0216 14:20:57.399391 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:20:57 crc kubenswrapper[4816]: E0216 14:20:57.399693 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:21:06 crc kubenswrapper[4816]: I0216 14:21:06.375879 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 16 14:21:06 crc kubenswrapper[4816]: I0216 14:21:06.513853 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.199023 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-vd6pc"] 
Feb 16 14:21:10 crc kubenswrapper[4816]: E0216 14:21:10.199887 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d65c1feb-6642-43db-8a3d-35330f524335" containerName="mariadb-account-create-update"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.199907 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d65c1feb-6642-43db-8a3d-35330f524335" containerName="mariadb-account-create-update"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.200113 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d65c1feb-6642-43db-8a3d-35330f524335" containerName="mariadb-account-create-update"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.201006 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.215721 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-vd6pc"]
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.313551 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pknkw\" (UniqueName: \"kubernetes.io/projected/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-kube-api-access-pknkw\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.313616 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.313674 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-config\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.415886 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pknkw\" (UniqueName: \"kubernetes.io/projected/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-kube-api-access-pknkw\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.415943 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.415965 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-config\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.417087 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-config\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.417126 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.438266 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pknkw\" (UniqueName: \"kubernetes.io/projected/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-kube-api-access-pknkw\") pod \"dnsmasq-dns-5b7946d7b9-vd6pc\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.525191 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:10 crc kubenswrapper[4816]: I0216 14:21:10.807674 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 16 14:21:11 crc kubenswrapper[4816]: I0216 14:21:11.052015 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-vd6pc"]
Feb 16 14:21:11 crc kubenswrapper[4816]: W0216 14:21:11.067770 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11dedb3d_4288_4a1d_b6e9_9f7066b9d00c.slice/crio-7fb0f1a9711fea0010f3617e78e7dfbda7f4468c16d0b3a7cb6d4848fcf8fea3 WatchSource:0}: Error finding container 7fb0f1a9711fea0010f3617e78e7dfbda7f4468c16d0b3a7cb6d4848fcf8fea3: Status 404 returned error can't find the container with id 7fb0f1a9711fea0010f3617e78e7dfbda7f4468c16d0b3a7cb6d4848fcf8fea3
Feb 16 14:21:11 crc kubenswrapper[4816]: I0216 14:21:11.405847 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690"
Feb 16 14:21:11 crc kubenswrapper[4816]: E0216 14:21:11.407718 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:21:11 crc kubenswrapper[4816]: I0216 14:21:11.547428 4816 generic.go:334] "Generic (PLEG): container finished" podID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerID="32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487" exitCode=0
Feb 16 14:21:11 crc kubenswrapper[4816]: I0216 14:21:11.547517 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" event={"ID":"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c","Type":"ContainerDied","Data":"32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487"}
Feb 16 14:21:11 crc kubenswrapper[4816]: I0216 14:21:11.547894 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" event={"ID":"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c","Type":"ContainerStarted","Data":"7fb0f1a9711fea0010f3617e78e7dfbda7f4468c16d0b3a7cb6d4848fcf8fea3"}
Feb 16 14:21:11 crc kubenswrapper[4816]: I0216 14:21:11.835059 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 16 14:21:12 crc kubenswrapper[4816]: I0216 14:21:12.555640 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" event={"ID":"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c","Type":"ContainerStarted","Data":"8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51"}
Feb 16 14:21:12 crc kubenswrapper[4816]: I0216 14:21:12.556078 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc"
Feb 16 14:21:12 crc kubenswrapper[4816]: I0216 14:21:12.584082 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" podStartSLOduration=2.58404736 podStartE2EDuration="2.58404736s" podCreationTimestamp="2026-02-16 14:21:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:21:12.576159456 +0000 UTC m=+4671.902873184" watchObservedRunningTime="2026-02-16 14:21:12.58404736 +0000 UTC m=+4671.910761088"
Feb 16 14:21:13 crc kubenswrapper[4816]: I0216 14:21:13.006084 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" containerName="rabbitmq" containerID="cri-o://5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae" gracePeriod=604798
Feb 16 14:21:13 crc kubenswrapper[4816]: I0216 14:21:13.804689 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerName="rabbitmq" containerID="cri-o://05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab" gracePeriod=604799
Feb 16 14:21:16 crc kubenswrapper[4816]: I0216 14:21:16.403638 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.242:5672: connect: connection refused"
Feb 16 14:21:16 crc kubenswrapper[4816]: I0216 14:21:16.511553 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.243:5672: connect: connection refused"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.605746 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.658363 4816 generic.go:334] "Generic (PLEG): container finished" podID="cc63654c-c1e8-4470-8345-57e5a576d246" containerID="5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae" exitCode=0
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.658436 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.658438 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cc63654c-c1e8-4470-8345-57e5a576d246","Type":"ContainerDied","Data":"5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae"} Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.658508 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cc63654c-c1e8-4470-8345-57e5a576d246","Type":"ContainerDied","Data":"e62e757917a91d9ce1dfb832fdda1327d26f74b03d739033c979e451c10f50bb"} Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.658532 4816 scope.go:117] "RemoveContainer" containerID="5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.687404 4816 scope.go:117] "RemoveContainer" containerID="d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.708953 4816 scope.go:117] "RemoveContainer" containerID="5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae" Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:19.709539 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae\": container with ID starting with 5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae not found: ID does not exist" containerID="5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.709576 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae"} err="failed to get container status \"5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae\": rpc error: code = NotFound desc = could not find container \"5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae\": container with ID starting with 5084419b06cdba49ee796f8ad4e6749b31c6e4574b6f3707f47f48dea2af45ae not found: ID does not exist" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.709595 4816 scope.go:117] "RemoveContainer" containerID="d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4" Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:19.709875 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4\": container with ID starting with d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4 not found: ID does not exist" containerID="d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.709898 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4"} err="failed to get container status \"d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4\": rpc error: code = NotFound desc = could not find container \"d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4\": container with ID starting with d6b78070aa6e206f28df980b5423f52b3e8e0d23e84096ec2ae48a5a3756a5b4 not found: ID does not exist" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 
14:21:19.795262 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-confd\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795393 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-plugins\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795548 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795595 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz225\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-kube-api-access-rz225\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795624 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc63654c-c1e8-4470-8345-57e5a576d246-pod-info\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795687 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-plugins-conf\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795724 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-erlang-cookie\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795752 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-server-conf\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795783 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc63654c-c1e8-4470-8345-57e5a576d246-erlang-cookie-secret\") pod \"cc63654c-c1e8-4470-8345-57e5a576d246\" (UID: \"cc63654c-c1e8-4470-8345-57e5a576d246\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.795776 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). 
InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.796126 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.796143 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.796395 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.801807 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc63654c-c1e8-4470-8345-57e5a576d246-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.801807 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cc63654c-c1e8-4470-8345-57e5a576d246-pod-info" (OuterVolumeSpecName: "pod-info") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.802580 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-kube-api-access-rz225" (OuterVolumeSpecName: "kube-api-access-rz225") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "kube-api-access-rz225". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.808210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa" (OuterVolumeSpecName: "persistence") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.818504 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-server-conf" (OuterVolumeSpecName: "server-conf") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.864068 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cc63654c-c1e8-4470-8345-57e5a576d246" (UID: "cc63654c-c1e8-4470-8345-57e5a576d246"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.896898 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.896974 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") on node \"crc\" " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.896987 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz225\" (UniqueName: \"kubernetes.io/projected/cc63654c-c1e8-4470-8345-57e5a576d246-kube-api-access-rz225\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.896999 4816 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cc63654c-c1e8-4470-8345-57e5a576d246-pod-info\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.897008 4816 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.897017 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cc63654c-c1e8-4470-8345-57e5a576d246-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.897025 4816 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cc63654c-c1e8-4470-8345-57e5a576d246-server-conf\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.897035 4816 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cc63654c-c1e8-4470-8345-57e5a576d246-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.911539 4816 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.911734 4816 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa") on node "crc"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:19.998022 4816 reconciler_common.go:293] "Volume detached for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") on node \"crc\" DevicePath \"\""
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.017798 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.027283 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.061828 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:20.062257 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" containerName="setup-container"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.062286 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" containerName="setup-container"
Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:20.062302 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" containerName="rabbitmq"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.062310 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" containerName="rabbitmq"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.062496 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" containerName="rabbitmq"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.063526 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.066077 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.066120 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.066177 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-9zgn7"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.066091 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.066931 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.093882 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.200547 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.200593 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.200618 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.200905 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzvr8\" (UniqueName: \"kubernetes.io/projected/463a765f-a9c4-41c4-8198-4852beabb6df-kube-api-access-bzvr8\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.200991 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/463a765f-a9c4-41c4-8198-4852beabb6df-pod-info\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.201043 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/463a765f-a9c4-41c4-8198-4852beabb6df-server-conf\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.201069 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/463a765f-a9c4-41c4-8198-4852beabb6df-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.201098 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/463a765f-a9c4-41c4-8198-4852beabb6df-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.201186 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.302916 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzvr8\" (UniqueName: \"kubernetes.io/projected/463a765f-a9c4-41c4-8198-4852beabb6df-kube-api-access-bzvr8\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.302983 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/463a765f-a9c4-41c4-8198-4852beabb6df-pod-info\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.303016 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/463a765f-a9c4-41c4-8198-4852beabb6df-server-conf\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.303038 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/463a765f-a9c4-41c4-8198-4852beabb6df-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.303057 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/463a765f-a9c4-41c4-8198-4852beabb6df-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.303099 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.303149 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.303173 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.303199 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.304363 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.304873 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0"
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.306138 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.306178 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a5678448aeb9611164dd6454bd92d995887f86f00f3350ffea9ae651dc862e2c/globalmount\"" pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.309996 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/463a765f-a9c4-41c4-8198-4852beabb6df-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.310606 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/463a765f-a9c4-41c4-8198-4852beabb6df-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.311197 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/463a765f-a9c4-41c4-8198-4852beabb6df-pod-info\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.313859 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/463a765f-a9c4-41c4-8198-4852beabb6df-server-conf\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.317084 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/463a765f-a9c4-41c4-8198-4852beabb6df-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.365554 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzvr8\" (UniqueName: \"kubernetes.io/projected/463a765f-a9c4-41c4-8198-4852beabb6df-kube-api-access-bzvr8\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.376192 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-da984a9f-a293-4ba5-ace7-240dfc078aaa\") pod \"rabbitmq-server-0\" (UID: \"463a765f-a9c4-41c4-8198-4852beabb6df\") " pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.418715 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.424650 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.507211 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-plugins\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.507672 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-server-conf\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.507695 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-plugins-conf\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.507800 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bdb83503-25de-4204-b73d-b1c5fc3510d7-erlang-cookie-secret\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.507877 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.507946 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.507993 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-erlang-cookie\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.508023 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtff7\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-kube-api-access-gtff7\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.508063 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-confd\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.508089 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bdb83503-25de-4204-b73d-b1c5fc3510d7-pod-info\") pod \"bdb83503-25de-4204-b73d-b1c5fc3510d7\" (UID: \"bdb83503-25de-4204-b73d-b1c5fc3510d7\") " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.508486 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.508812 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.509273 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.512925 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdb83503-25de-4204-b73d-b1c5fc3510d7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.534146 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/bdb83503-25de-4204-b73d-b1c5fc3510d7-pod-info" (OuterVolumeSpecName: "pod-info") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.534349 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-server-conf" (OuterVolumeSpecName: "server-conf") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.537340 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.542948 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-kube-api-access-gtff7" (OuterVolumeSpecName: "kube-api-access-gtff7") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "kube-api-access-gtff7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.604061 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d" (OuterVolumeSpecName: "persistence") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "pvc-c556dcdf-d6f3-42c2-812a-013cd587560d". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.609792 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.609832 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtff7\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-kube-api-access-gtff7\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.609842 4816 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bdb83503-25de-4204-b73d-b1c5fc3510d7-pod-info\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.609851 4816 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-server-conf\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.609860 4816 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bdb83503-25de-4204-b73d-b1c5fc3510d7-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.609868 4816 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bdb83503-25de-4204-b73d-b1c5fc3510d7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.609890 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") on node \"crc\" " Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.641995 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-7jzfh"] Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.642305 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" podUID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerName="dnsmasq-dns" containerID="cri-o://b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53" gracePeriod=10 Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.686908 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "bdb83503-25de-4204-b73d-b1c5fc3510d7" (UID: "bdb83503-25de-4204-b73d-b1c5fc3510d7"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.694405 4816 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.694595 4816 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c556dcdf-d6f3-42c2-812a-013cd587560d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d") on node "crc" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.695375 4816 generic.go:334] "Generic (PLEG): container finished" podID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerID="05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab" exitCode=0 Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.695461 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bdb83503-25de-4204-b73d-b1c5fc3510d7","Type":"ContainerDied","Data":"05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab"} Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.695491 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"bdb83503-25de-4204-b73d-b1c5fc3510d7","Type":"ContainerDied","Data":"85356b904c74845a61416114e504f57855cbe892fc4cafb492d815804b2a9c1d"} Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.695509 4816 scope.go:117] "RemoveContainer" containerID="05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.695678 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.711261 4816 reconciler_common.go:293] "Volume detached for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.711286 4816 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bdb83503-25de-4204-b73d-b1c5fc3510d7-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.731876 4816 scope.go:117] "RemoveContainer" containerID="a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.766619 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.784362 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.798361 4816 scope.go:117] "RemoveContainer" containerID="05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab" Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:20.800478 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab\": container with ID starting with 05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab not found: ID does not exist" containerID="05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.800506 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab"} err="failed to get container status 
\"05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab\": rpc error: code = NotFound desc = could not find container \"05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab\": container with ID starting with 05b4019eb8f355a5e9b369b7c5bf15025b06fba35b364eccb88839d5e6cab5ab not found: ID does not exist" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.800527 4816 scope.go:117] "RemoveContainer" containerID="a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344" Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:20.800902 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344\": container with ID starting with a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344 not found: ID does not exist" containerID="a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.800937 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344"} err="failed to get container status \"a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344\": rpc error: code = NotFound desc = could not find container \"a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344\": container with ID starting with a2a1942f58ee06bc9744458b4d9af986090f5f055246e22ce22eb4e1bdd61344 not found: ID does not exist" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.808943 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:20.809245 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerName="setup-container" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.809256 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerName="setup-container" Feb 16 14:21:20 crc kubenswrapper[4816]: E0216 14:21:20.809275 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerName="rabbitmq" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.809281 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerName="rabbitmq" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.809413 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" containerName="rabbitmq" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.810343 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.814688 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.814944 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.815236 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.815417 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.815524 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-985r5" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.843296 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.914971 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cd70d46-7198-421e-8082-95af01516a75-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915056 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv65g\" (UniqueName: \"kubernetes.io/projected/6cd70d46-7198-421e-8082-95af01516a75-kube-api-access-rv65g\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915099 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915156 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915222 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915260 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" 
Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915285 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cd70d46-7198-421e-8082-95af01516a75-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915323 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cd70d46-7198-421e-8082-95af01516a75-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:20 crc kubenswrapper[4816]: I0216 14:21:20.915372 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cd70d46-7198-421e-8082-95af01516a75-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016618 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016698 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016722 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cd70d46-7198-421e-8082-95af01516a75-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016750 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cd70d46-7198-421e-8082-95af01516a75-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016787 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cd70d46-7198-421e-8082-95af01516a75-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016815 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cd70d46-7198-421e-8082-95af01516a75-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 
14:21:21.016845 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv65g\" (UniqueName: \"kubernetes.io/projected/6cd70d46-7198-421e-8082-95af01516a75-kube-api-access-rv65g\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016873 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.016909 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.017371 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.017369 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.017838 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cd70d46-7198-421e-8082-95af01516a75-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.021013 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cd70d46-7198-421e-8082-95af01516a75-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.021164 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cd70d46-7198-421e-8082-95af01516a75-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.021451 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cd70d46-7198-421e-8082-95af01516a75-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.021841 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/6cd70d46-7198-421e-8082-95af01516a75-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.022097 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.022139 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2144d1a623b131c734fb9987db4b6b56c2cd53bd1708342ff09b0e1feffecd8a/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.034807 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv65g\" (UniqueName: \"kubernetes.io/projected/6cd70d46-7198-421e-8082-95af01516a75-kube-api-access-rv65g\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.057039 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c556dcdf-d6f3-42c2-812a-013cd587560d\") pod \"rabbitmq-cell1-server-0\" (UID: \"6cd70d46-7198-421e-8082-95af01516a75\") " pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.149558 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.222355 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.426584 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdb83503-25de-4204-b73d-b1c5fc3510d7" path="/var/lib/kubelet/pods/bdb83503-25de-4204-b73d-b1c5fc3510d7/volumes" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.427774 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc63654c-c1e8-4470-8345-57e5a576d246" path="/var/lib/kubelet/pods/cc63654c-c1e8-4470-8345-57e5a576d246/volumes" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.475010 4816 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podd65c1feb-6642-43db-8a3d-35330f524335"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podd65c1feb-6642-43db-8a3d-35330f524335] : Timed out while waiting for systemd to remove kubepods-besteffort-podd65c1feb_6642_43db_8a3d_35330f524335.slice" Feb 16 14:21:21 crc kubenswrapper[4816]: E0216 14:21:21.475063 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podd65c1feb-6642-43db-8a3d-35330f524335] : unable to destroy cgroup paths for cgroup [kubepods besteffort podd65c1feb-6642-43db-8a3d-35330f524335] : Timed out while waiting for systemd to remove kubepods-besteffort-podd65c1feb_6642_43db_8a3d_35330f524335.slice" pod="openstack/root-account-create-update-zkvxp" podUID="d65c1feb-6642-43db-8a3d-35330f524335" Feb 16 14:21:21 crc kubenswrapper[4816]: W0216 14:21:21.615922 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cd70d46_7198_421e_8082_95af01516a75.slice/crio-d9b914c84a1a4d291f6d0f6fca637dce30ebfb0e8a4046eda85c6029ee0dc5e2 WatchSource:0}: Error finding container d9b914c84a1a4d291f6d0f6fca637dce30ebfb0e8a4046eda85c6029ee0dc5e2: Status 404 returned error can't find the container with id d9b914c84a1a4d291f6d0f6fca637dce30ebfb0e8a4046eda85c6029ee0dc5e2 Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.617398 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.675487 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.713218 4816 generic.go:334] "Generic (PLEG): container finished" podID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerID="b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53" exitCode=0 Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.713272 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.713285 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" event={"ID":"94ab64de-1d03-4d78-a575-5427a0108eb0","Type":"ContainerDied","Data":"b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53"} Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.713311 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-7jzfh" event={"ID":"94ab64de-1d03-4d78-a575-5427a0108eb0","Type":"ContainerDied","Data":"0604b545c7837991f9d75ef6037863a025560c765dd80972e7e9c607105bcc38"} Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.713329 4816 scope.go:117] "RemoveContainer" containerID="b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.714880 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"463a765f-a9c4-41c4-8198-4852beabb6df","Type":"ContainerStarted","Data":"4a6c77e635843b7873c0d5f48d65689d5f69791ab670684b4bb52170542101c7"} Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.716128 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-zkvxp" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.716850 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6cd70d46-7198-421e-8082-95af01516a75","Type":"ContainerStarted","Data":"d9b914c84a1a4d291f6d0f6fca637dce30ebfb0e8a4046eda85c6029ee0dc5e2"} Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.726089 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-config\") pod \"94ab64de-1d03-4d78-a575-5427a0108eb0\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.726314 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-dns-svc\") pod \"94ab64de-1d03-4d78-a575-5427a0108eb0\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.726711 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/94ab64de-1d03-4d78-a575-5427a0108eb0-kube-api-access-b6gkz\") pod \"94ab64de-1d03-4d78-a575-5427a0108eb0\" (UID: \"94ab64de-1d03-4d78-a575-5427a0108eb0\") " Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.730772 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94ab64de-1d03-4d78-a575-5427a0108eb0-kube-api-access-b6gkz" (OuterVolumeSpecName: "kube-api-access-b6gkz") pod "94ab64de-1d03-4d78-a575-5427a0108eb0" (UID: "94ab64de-1d03-4d78-a575-5427a0108eb0"). InnerVolumeSpecName "kube-api-access-b6gkz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.752944 4816 scope.go:117] "RemoveContainer" containerID="dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.767378 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "94ab64de-1d03-4d78-a575-5427a0108eb0" (UID: "94ab64de-1d03-4d78-a575-5427a0108eb0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.769425 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-config" (OuterVolumeSpecName: "config") pod "94ab64de-1d03-4d78-a575-5427a0108eb0" (UID: "94ab64de-1d03-4d78-a575-5427a0108eb0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.775403 4816 scope.go:117] "RemoveContainer" containerID="b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53" Feb 16 14:21:21 crc kubenswrapper[4816]: E0216 14:21:21.775813 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53\": container with ID starting with b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53 not found: ID does not exist" containerID="b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.775851 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53"} err="failed to get container status \"b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53\": rpc error: code = NotFound desc = could not find container \"b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53\": container with ID starting with b68bddf9e2e0d9f7add864ad0450b0add1043e6fa11d80546708b3eb08185b53 not found: ID does not exist" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.775877 4816 scope.go:117] "RemoveContainer" containerID="dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca" Feb 16 14:21:21 crc kubenswrapper[4816]: E0216 14:21:21.776109 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca\": container with ID starting with dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca not found: ID does not exist" containerID="dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.776141 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca"} err="failed to get container status \"dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca\": rpc error: code = NotFound desc = could not find container \"dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca\": container with ID starting with dd230ae27f03d2054ae09a2aaafab49ecaa8544dbefa2488a0132cb20b7fa1ca not found: ID does not exist" Feb 16 
14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.836573 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/94ab64de-1d03-4d78-a575-5427a0108eb0-kube-api-access-b6gkz\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.836619 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:21 crc kubenswrapper[4816]: I0216 14:21:21.836643 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94ab64de-1d03-4d78-a575-5427a0108eb0-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:21:22 crc kubenswrapper[4816]: I0216 14:21:22.045604 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-7jzfh"] Feb 16 14:21:22 crc kubenswrapper[4816]: I0216 14:21:22.053385 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-7jzfh"] Feb 16 14:21:23 crc kubenswrapper[4816]: I0216 14:21:23.398410 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:21:23 crc kubenswrapper[4816]: E0216 14:21:23.398906 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:21:23 crc kubenswrapper[4816]: I0216 14:21:23.408852 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94ab64de-1d03-4d78-a575-5427a0108eb0" path="/var/lib/kubelet/pods/94ab64de-1d03-4d78-a575-5427a0108eb0/volumes" Feb 16 14:21:23 crc kubenswrapper[4816]: I0216 14:21:23.742916 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"463a765f-a9c4-41c4-8198-4852beabb6df","Type":"ContainerStarted","Data":"c2923588f55f0ad5861cd0df4a9d7d5681d0272591bd904dfc34221a62cc1be8"} Feb 16 14:21:23 crc kubenswrapper[4816]: I0216 14:21:23.744775 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6cd70d46-7198-421e-8082-95af01516a75","Type":"ContainerStarted","Data":"1fa6672749d48abb1a34bd82b6e6543348569184ad09c12a9628f2c545258290"} Feb 16 14:21:34 crc kubenswrapper[4816]: I0216 14:21:34.399871 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:21:34 crc kubenswrapper[4816]: E0216 14:21:34.401838 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:21:48 crc kubenswrapper[4816]: I0216 14:21:48.399314 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:21:48 crc kubenswrapper[4816]: E0216 14:21:48.400620 
4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:21:55 crc kubenswrapper[4816]: I0216 14:21:55.000915 4816 generic.go:334] "Generic (PLEG): container finished" podID="463a765f-a9c4-41c4-8198-4852beabb6df" containerID="c2923588f55f0ad5861cd0df4a9d7d5681d0272591bd904dfc34221a62cc1be8" exitCode=0 Feb 16 14:21:55 crc kubenswrapper[4816]: I0216 14:21:55.001040 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"463a765f-a9c4-41c4-8198-4852beabb6df","Type":"ContainerDied","Data":"c2923588f55f0ad5861cd0df4a9d7d5681d0272591bd904dfc34221a62cc1be8"} Feb 16 14:21:56 crc kubenswrapper[4816]: I0216 14:21:56.010175 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"463a765f-a9c4-41c4-8198-4852beabb6df","Type":"ContainerStarted","Data":"dda103ae92d18107d4ddbe9ed60908d2644749ff4dece66ca509ee39a7f9bdf9"} Feb 16 14:21:56 crc kubenswrapper[4816]: I0216 14:21:56.010600 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 16 14:21:56 crc kubenswrapper[4816]: I0216 14:21:56.013401 4816 generic.go:334] "Generic (PLEG): container finished" podID="6cd70d46-7198-421e-8082-95af01516a75" containerID="1fa6672749d48abb1a34bd82b6e6543348569184ad09c12a9628f2c545258290" exitCode=0 Feb 16 14:21:56 crc kubenswrapper[4816]: I0216 14:21:56.013449 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6cd70d46-7198-421e-8082-95af01516a75","Type":"ContainerDied","Data":"1fa6672749d48abb1a34bd82b6e6543348569184ad09c12a9628f2c545258290"} Feb 16 14:21:56 crc kubenswrapper[4816]: I0216 14:21:56.085097 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.085068182 podStartE2EDuration="36.085068182s" podCreationTimestamp="2026-02-16 14:21:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:21:56.053994104 +0000 UTC m=+4715.380707832" watchObservedRunningTime="2026-02-16 14:21:56.085068182 +0000 UTC m=+4715.411781920" Feb 16 14:21:57 crc kubenswrapper[4816]: I0216 14:21:57.020984 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6cd70d46-7198-421e-8082-95af01516a75","Type":"ContainerStarted","Data":"04022601ff98ce756a2b413ac322ca2ae62fc7307f3248a7594fbb94c809010d"} Feb 16 14:21:57 crc kubenswrapper[4816]: I0216 14:21:57.021585 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:21:59 crc kubenswrapper[4816]: I0216 14:21:59.398847 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:21:59 crc kubenswrapper[4816]: E0216 14:21:59.399269 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:22:10 crc kubenswrapper[4816]: I0216 14:22:10.428336 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 16 14:22:10 crc kubenswrapper[4816]: I0216 14:22:10.450532 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=50.45051336 podStartE2EDuration="50.45051336s" podCreationTimestamp="2026-02-16 14:21:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:21:57.044002327 +0000 UTC m=+4716.370716055" watchObservedRunningTime="2026-02-16 14:22:10.45051336 +0000 UTC m=+4729.777227088" Feb 16 14:22:11 crc kubenswrapper[4816]: I0216 14:22:11.152914 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 16 14:22:12 crc kubenswrapper[4816]: I0216 14:22:12.399514 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:22:12 crc kubenswrapper[4816]: E0216 14:22:12.400711 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.255532 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Feb 16 14:22:18 crc kubenswrapper[4816]: E0216 14:22:18.256361 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerName="init" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.256374 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerName="init" Feb 16 14:22:18 crc kubenswrapper[4816]: E0216 14:22:18.256384 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerName="dnsmasq-dns" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.256390 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerName="dnsmasq-dns" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.256549 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="94ab64de-1d03-4d78-a575-5427a0108eb0" containerName="dnsmasq-dns" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.257087 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.259461 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-4ngll" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.267194 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.372337 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jldl\" (UniqueName: \"kubernetes.io/projected/d247792c-455e-4b5c-ad18-d8292b98d92f-kube-api-access-7jldl\") pod \"mariadb-client\" (UID: \"d247792c-455e-4b5c-ad18-d8292b98d92f\") " pod="openstack/mariadb-client" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.474167 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jldl\" (UniqueName: \"kubernetes.io/projected/d247792c-455e-4b5c-ad18-d8292b98d92f-kube-api-access-7jldl\") pod \"mariadb-client\" (UID: \"d247792c-455e-4b5c-ad18-d8292b98d92f\") " pod="openstack/mariadb-client" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.508974 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jldl\" (UniqueName: \"kubernetes.io/projected/d247792c-455e-4b5c-ad18-d8292b98d92f-kube-api-access-7jldl\") pod \"mariadb-client\" (UID: \"d247792c-455e-4b5c-ad18-d8292b98d92f\") " pod="openstack/mariadb-client" Feb 16 14:22:18 crc kubenswrapper[4816]: I0216 14:22:18.576936 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:22:19 crc kubenswrapper[4816]: I0216 14:22:19.085109 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:22:19 crc kubenswrapper[4816]: W0216 14:22:19.098984 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd247792c_455e_4b5c_ad18_d8292b98d92f.slice/crio-54f81f7011e440a70dd852e5f5a0afc759528d4273669aea865c92ea2c26d66e WatchSource:0}: Error finding container 54f81f7011e440a70dd852e5f5a0afc759528d4273669aea865c92ea2c26d66e: Status 404 returned error can't find the container with id 54f81f7011e440a70dd852e5f5a0afc759528d4273669aea865c92ea2c26d66e Feb 16 14:22:19 crc kubenswrapper[4816]: I0216 14:22:19.171599 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d247792c-455e-4b5c-ad18-d8292b98d92f","Type":"ContainerStarted","Data":"54f81f7011e440a70dd852e5f5a0afc759528d4273669aea865c92ea2c26d66e"} Feb 16 14:22:20 crc kubenswrapper[4816]: I0216 14:22:20.179743 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d247792c-455e-4b5c-ad18-d8292b98d92f","Type":"ContainerStarted","Data":"81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72"} Feb 16 14:22:20 crc kubenswrapper[4816]: I0216 14:22:20.200438 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client" podStartSLOduration=2.200407826 podStartE2EDuration="2.200407826s" podCreationTimestamp="2026-02-16 14:22:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:22:20.193774345 +0000 UTC m=+4739.520488073" watchObservedRunningTime="2026-02-16 14:22:20.200407826 +0000 UTC m=+4739.527121584" Feb 16 14:22:23 crc 
kubenswrapper[4816]: I0216 14:22:23.398857 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:22:23 crc kubenswrapper[4816]: E0216 14:22:23.399424 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:22:34 crc kubenswrapper[4816]: I0216 14:22:34.798000 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:22:34 crc kubenswrapper[4816]: I0216 14:22:34.798639 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-client" podUID="d247792c-455e-4b5c-ad18-d8292b98d92f" containerName="mariadb-client" containerID="cri-o://81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72" gracePeriod=30 Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.255038 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.299269 4816 generic.go:334] "Generic (PLEG): container finished" podID="d247792c-455e-4b5c-ad18-d8292b98d92f" containerID="81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72" exitCode=143 Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.299339 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.299358 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d247792c-455e-4b5c-ad18-d8292b98d92f","Type":"ContainerDied","Data":"81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72"} Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.299708 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d247792c-455e-4b5c-ad18-d8292b98d92f","Type":"ContainerDied","Data":"54f81f7011e440a70dd852e5f5a0afc759528d4273669aea865c92ea2c26d66e"} Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.299731 4816 scope.go:117] "RemoveContainer" containerID="81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72" Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.320576 4816 scope.go:117] "RemoveContainer" containerID="81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72" Feb 16 14:22:35 crc kubenswrapper[4816]: E0216 14:22:35.321283 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72\": container with ID starting with 81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72 not found: ID does not exist" containerID="81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72" Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.321393 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72"} err="failed to get container status \"81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72\": rpc error: code = NotFound desc = 
could not find container \"81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72\": container with ID starting with 81e095188b204709725d919f56a07464c773e7aa55d7d5835f92a7c7fc2ecd72 not found: ID does not exist" Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.330828 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jldl\" (UniqueName: \"kubernetes.io/projected/d247792c-455e-4b5c-ad18-d8292b98d92f-kube-api-access-7jldl\") pod \"d247792c-455e-4b5c-ad18-d8292b98d92f\" (UID: \"d247792c-455e-4b5c-ad18-d8292b98d92f\") " Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.340977 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d247792c-455e-4b5c-ad18-d8292b98d92f-kube-api-access-7jldl" (OuterVolumeSpecName: "kube-api-access-7jldl") pod "d247792c-455e-4b5c-ad18-d8292b98d92f" (UID: "d247792c-455e-4b5c-ad18-d8292b98d92f"). InnerVolumeSpecName "kube-api-access-7jldl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.433056 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jldl\" (UniqueName: \"kubernetes.io/projected/d247792c-455e-4b5c-ad18-d8292b98d92f-kube-api-access-7jldl\") on node \"crc\" DevicePath \"\"" Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.622436 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:22:35 crc kubenswrapper[4816]: I0216 14:22:35.630545 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:22:36 crc kubenswrapper[4816]: I0216 14:22:36.405977 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:22:36 crc kubenswrapper[4816]: E0216 14:22:36.406595 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:22:37 crc kubenswrapper[4816]: I0216 14:22:37.409859 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d247792c-455e-4b5c-ad18-d8292b98d92f" path="/var/lib/kubelet/pods/d247792c-455e-4b5c-ad18-d8292b98d92f/volumes" Feb 16 14:22:50 crc kubenswrapper[4816]: I0216 14:22:50.399063 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:22:50 crc kubenswrapper[4816]: E0216 14:22:50.400169 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.126915 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mhnhx"] Feb 16 14:22:54 crc kubenswrapper[4816]: E0216 14:22:54.127516 4816 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d247792c-455e-4b5c-ad18-d8292b98d92f" containerName="mariadb-client" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.127531 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d247792c-455e-4b5c-ad18-d8292b98d92f" containerName="mariadb-client" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.127748 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d247792c-455e-4b5c-ad18-d8292b98d92f" containerName="mariadb-client" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.128843 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.151692 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mhnhx"] Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.261800 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-utilities\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.262197 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b56j\" (UniqueName: \"kubernetes.io/projected/ad342149-ddef-4c1b-9408-85ea241474c6-kube-api-access-7b56j\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.262438 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-catalog-content\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.363926 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-utilities\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.364027 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b56j\" (UniqueName: \"kubernetes.io/projected/ad342149-ddef-4c1b-9408-85ea241474c6-kube-api-access-7b56j\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.364088 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-catalog-content\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.364520 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-utilities\") pod \"community-operators-mhnhx\" (UID: 
\"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.364591 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-catalog-content\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.397781 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b56j\" (UniqueName: \"kubernetes.io/projected/ad342149-ddef-4c1b-9408-85ea241474c6-kube-api-access-7b56j\") pod \"community-operators-mhnhx\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.498348 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:22:54 crc kubenswrapper[4816]: I0216 14:22:54.878766 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mhnhx"] Feb 16 14:22:55 crc kubenswrapper[4816]: I0216 14:22:55.450025 4816 generic.go:334] "Generic (PLEG): container finished" podID="ad342149-ddef-4c1b-9408-85ea241474c6" containerID="0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286" exitCode=0 Feb 16 14:22:55 crc kubenswrapper[4816]: I0216 14:22:55.450105 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhnhx" event={"ID":"ad342149-ddef-4c1b-9408-85ea241474c6","Type":"ContainerDied","Data":"0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286"} Feb 16 14:22:55 crc kubenswrapper[4816]: I0216 14:22:55.450134 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhnhx" event={"ID":"ad342149-ddef-4c1b-9408-85ea241474c6","Type":"ContainerStarted","Data":"ddfd017fafea9676ea1b1272359b0b2e6c936fa2b9853f6de5dee7bc1ae40b38"} Feb 16 14:22:55 crc kubenswrapper[4816]: I0216 14:22:55.452220 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:22:56 crc kubenswrapper[4816]: I0216 14:22:56.460408 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhnhx" event={"ID":"ad342149-ddef-4c1b-9408-85ea241474c6","Type":"ContainerStarted","Data":"ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f"} Feb 16 14:22:57 crc kubenswrapper[4816]: I0216 14:22:57.469486 4816 generic.go:334] "Generic (PLEG): container finished" podID="ad342149-ddef-4c1b-9408-85ea241474c6" containerID="ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f" exitCode=0 Feb 16 14:22:57 crc kubenswrapper[4816]: I0216 14:22:57.469541 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhnhx" event={"ID":"ad342149-ddef-4c1b-9408-85ea241474c6","Type":"ContainerDied","Data":"ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f"} Feb 16 14:22:58 crc kubenswrapper[4816]: I0216 14:22:58.478728 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhnhx" event={"ID":"ad342149-ddef-4c1b-9408-85ea241474c6","Type":"ContainerStarted","Data":"3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246"} Feb 16 
Feb 16 14:22:58 crc kubenswrapper[4816]: I0216 14:22:58.498411 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mhnhx" podStartSLOduration=2.046708341 podStartE2EDuration="4.498391322s" podCreationTimestamp="2026-02-16 14:22:54 +0000 UTC" firstStartedPulling="2026-02-16 14:22:55.451738263 +0000 UTC m=+4774.778451991" lastFinishedPulling="2026-02-16 14:22:57.903421244 +0000 UTC m=+4777.230134972" observedRunningTime="2026-02-16 14:22:58.496269144 +0000 UTC m=+4777.822982882" watchObservedRunningTime="2026-02-16 14:22:58.498391322 +0000 UTC m=+4777.825105050"
Feb 16 14:23:01 crc kubenswrapper[4816]: I0216 14:23:01.422877 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690"
Feb 16 14:23:01 crc kubenswrapper[4816]: E0216 14:23:01.423475 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:23:04 crc kubenswrapper[4816]: I0216 14:23:04.500001 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mhnhx"
Feb 16 14:23:04 crc kubenswrapper[4816]: I0216 14:23:04.500478 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mhnhx"
Feb 16 14:23:04 crc kubenswrapper[4816]: I0216 14:23:04.556992 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mhnhx"
Feb 16 14:23:04 crc kubenswrapper[4816]: I0216 14:23:04.612093 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mhnhx"
Feb 16 14:23:04 crc kubenswrapper[4816]: I0216 14:23:04.791905 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mhnhx"]
Feb 16 14:23:06 crc kubenswrapper[4816]: I0216 14:23:06.541859 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mhnhx" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="registry-server" containerID="cri-o://3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246" gracePeriod=2
Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.484953 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhnhx"
Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.554482 4816 generic.go:334] "Generic (PLEG): container finished" podID="ad342149-ddef-4c1b-9408-85ea241474c6" containerID="3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246" exitCode=0
Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.554524 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhnhx" event={"ID":"ad342149-ddef-4c1b-9408-85ea241474c6","Type":"ContainerDied","Data":"3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246"}
Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.554535 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhnhx"
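
The pod_startup_latency_tracker entry above reports two durations, and the numbers are self-consistent: the SLO duration appears to be the end-to-end startup time with the image-pull window subtracted. As a worked check:

    \begin{aligned}
    t_{\mathrm{E2E}}  &= \mathrm{watchObservedRunningTime} - \mathrm{podCreationTimestamp} = 14{:}22{:}58.498391322 - 14{:}22{:}54 = 4.498391322\ \mathrm{s}\\
    t_{\mathrm{pull}} &= \mathrm{lastFinishedPulling} - \mathrm{firstStartedPulling} = 57.903421244 - 55.451738263 = 2.451682981\ \mathrm{s}\\
    t_{\mathrm{SLO}}  &= t_{\mathrm{E2E}} - t_{\mathrm{pull}} = 4.498391322 - 2.451682981 = 2.046708341\ \mathrm{s}
    \end{aligned}

which reproduces podStartSLOduration=2.046708341 exactly. The mariadb-copy-data entry later in this log shows the degenerate case: its pulling timestamps are zero-valued, so its SLO and E2E durations coincide.
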
Need to start a new one" pod="openshift-marketplace/community-operators-mhnhx" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.554552 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhnhx" event={"ID":"ad342149-ddef-4c1b-9408-85ea241474c6","Type":"ContainerDied","Data":"ddfd017fafea9676ea1b1272359b0b2e6c936fa2b9853f6de5dee7bc1ae40b38"} Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.554573 4816 scope.go:117] "RemoveContainer" containerID="3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.569767 4816 scope.go:117] "RemoveContainer" containerID="ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.590085 4816 scope.go:117] "RemoveContainer" containerID="0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.605062 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-catalog-content\") pod \"ad342149-ddef-4c1b-9408-85ea241474c6\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.605196 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b56j\" (UniqueName: \"kubernetes.io/projected/ad342149-ddef-4c1b-9408-85ea241474c6-kube-api-access-7b56j\") pod \"ad342149-ddef-4c1b-9408-85ea241474c6\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.605357 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-utilities\") pod \"ad342149-ddef-4c1b-9408-85ea241474c6\" (UID: \"ad342149-ddef-4c1b-9408-85ea241474c6\") " Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.606623 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-utilities" (OuterVolumeSpecName: "utilities") pod "ad342149-ddef-4c1b-9408-85ea241474c6" (UID: "ad342149-ddef-4c1b-9408-85ea241474c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.612928 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad342149-ddef-4c1b-9408-85ea241474c6-kube-api-access-7b56j" (OuterVolumeSpecName: "kube-api-access-7b56j") pod "ad342149-ddef-4c1b-9408-85ea241474c6" (UID: "ad342149-ddef-4c1b-9408-85ea241474c6"). InnerVolumeSpecName "kube-api-access-7b56j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.620607 4816 scope.go:117] "RemoveContainer" containerID="3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246" Feb 16 14:23:07 crc kubenswrapper[4816]: E0216 14:23:07.621156 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246\": container with ID starting with 3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246 not found: ID does not exist" containerID="3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.621206 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246"} err="failed to get container status \"3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246\": rpc error: code = NotFound desc = could not find container \"3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246\": container with ID starting with 3b0702effd71d0584350ee8b976d9e94dba5ccbda518ae02c5dacb404c73e246 not found: ID does not exist" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.621242 4816 scope.go:117] "RemoveContainer" containerID="ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f" Feb 16 14:23:07 crc kubenswrapper[4816]: E0216 14:23:07.621649 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f\": container with ID starting with ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f not found: ID does not exist" containerID="ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.621707 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f"} err="failed to get container status \"ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f\": rpc error: code = NotFound desc = could not find container \"ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f\": container with ID starting with ecff2796bb3f616b799986ad000e078e806df680549d1208f63fa2405b5c8d5f not found: ID does not exist" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.621736 4816 scope.go:117] "RemoveContainer" containerID="0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286" Feb 16 14:23:07 crc kubenswrapper[4816]: E0216 14:23:07.622045 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286\": container with ID starting with 0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286 not found: ID does not exist" containerID="0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.622073 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286"} err="failed to get container status \"0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286\": rpc error: code = NotFound desc = could not 
find container \"0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286\": container with ID starting with 0760eaa9079e94c598d9bb1efa8364a92b8cf4aa52535a7371ea20359148b286 not found: ID does not exist" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.669904 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad342149-ddef-4c1b-9408-85ea241474c6" (UID: "ad342149-ddef-4c1b-9408-85ea241474c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.707883 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.707922 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b56j\" (UniqueName: \"kubernetes.io/projected/ad342149-ddef-4c1b-9408-85ea241474c6-kube-api-access-7b56j\") on node \"crc\" DevicePath \"\"" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.707935 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad342149-ddef-4c1b-9408-85ea241474c6-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.889665 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mhnhx"] Feb 16 14:23:07 crc kubenswrapper[4816]: I0216 14:23:07.896138 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mhnhx"] Feb 16 14:23:09 crc kubenswrapper[4816]: I0216 14:23:09.408965 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" path="/var/lib/kubelet/pods/ad342149-ddef-4c1b-9408-85ea241474c6/volumes" Feb 16 14:23:14 crc kubenswrapper[4816]: I0216 14:23:14.398970 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:23:14 crc kubenswrapper[4816]: E0216 14:23:14.399555 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:23:27 crc kubenswrapper[4816]: I0216 14:23:27.104044 4816 scope.go:117] "RemoveContainer" containerID="a7130ad6ee21cc3b21acc4654aa605582152cfb590c2ecb9b18e8a398fcacef3" Feb 16 14:23:28 crc kubenswrapper[4816]: I0216 14:23:28.398373 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:23:28 crc kubenswrapper[4816]: E0216 14:23:28.399087 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:23:39 crc kubenswrapper[4816]: I0216 14:23:39.400027 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:23:39 crc kubenswrapper[4816]: E0216 14:23:39.401108 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:23:50 crc kubenswrapper[4816]: I0216 14:23:50.399974 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:23:50 crc kubenswrapper[4816]: E0216 14:23:50.401693 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:24:01 crc kubenswrapper[4816]: I0216 14:24:01.408123 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:24:01 crc kubenswrapper[4816]: E0216 14:24:01.409090 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:24:12 crc kubenswrapper[4816]: I0216 14:24:12.399167 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:24:12 crc kubenswrapper[4816]: E0216 14:24:12.399994 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:24:27 crc kubenswrapper[4816]: I0216 14:24:27.399249 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:24:27 crc kubenswrapper[4816]: E0216 14:24:27.400534 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:24:38 crc kubenswrapper[4816]: I0216 14:24:38.398768 4816 scope.go:117] "RemoveContainer" 
containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690" Feb 16 14:24:39 crc kubenswrapper[4816]: I0216 14:24:39.366614 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"90e2f0f25ce572784388a1d1c59ee443c4cd8305fe9cbe117e83babcb64c00c6"} Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.330920 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Feb 16 14:26:40 crc kubenswrapper[4816]: E0216 14:26:40.331727 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="extract-content" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.331748 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="extract-content" Feb 16 14:26:40 crc kubenswrapper[4816]: E0216 14:26:40.331781 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="extract-utilities" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.331787 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="extract-utilities" Feb 16 14:26:40 crc kubenswrapper[4816]: E0216 14:26:40.331797 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="registry-server" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.331804 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="registry-server" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.331944 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad342149-ddef-4c1b-9408-85ea241474c6" containerName="registry-server" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.332486 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.341209 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-4ngll" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.348461 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.509759 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh78j\" (UniqueName: \"kubernetes.io/projected/597a1a8c-07ae-48f5-a305-9db9b431ebf8-kube-api-access-sh78j\") pod \"mariadb-copy-data\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.510103 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\") pod \"mariadb-copy-data\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.611633 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\") pod \"mariadb-copy-data\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.611756 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh78j\" (UniqueName: \"kubernetes.io/projected/597a1a8c-07ae-48f5-a305-9db9b431ebf8-kube-api-access-sh78j\") pod \"mariadb-copy-data\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.614909 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.614960 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\") pod \"mariadb-copy-data\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/635d2a3bb9eb26887d1612086e8e317f559de150f7f1c0647bf34cab31d656e6/globalmount\"" pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.637341 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh78j\" (UniqueName: \"kubernetes.io/projected/597a1a8c-07ae-48f5-a305-9db9b431ebf8-kube-api-access-sh78j\") pod \"mariadb-copy-data\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.645434 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\") pod \"mariadb-copy-data\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " pod="openstack/mariadb-copy-data" Feb 16 14:26:40 crc kubenswrapper[4816]: I0216 14:26:40.698430 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Feb 16 14:26:41 crc kubenswrapper[4816]: I0216 14:26:41.329980 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Feb 16 14:26:41 crc kubenswrapper[4816]: I0216 14:26:41.409527 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"597a1a8c-07ae-48f5-a305-9db9b431ebf8","Type":"ContainerStarted","Data":"786b8639a7a1e12ddb483deb22a0baa61928b893ae03e54e6f9cf029edb34078"} Feb 16 14:26:42 crc kubenswrapper[4816]: I0216 14:26:42.420156 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"597a1a8c-07ae-48f5-a305-9db9b431ebf8","Type":"ContainerStarted","Data":"9f63bd45b1595ea6623a86e08a4a2096b4004637fea2dd24f46bf1ba93d76494"} Feb 16 14:26:42 crc kubenswrapper[4816]: I0216 14:26:42.446547 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.446512521 podStartE2EDuration="3.446512521s" podCreationTimestamp="2026-02-16 14:26:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:26:42.436817037 +0000 UTC m=+5001.763530795" watchObservedRunningTime="2026-02-16 14:26:42.446512521 +0000 UTC m=+5001.773226249" Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.039586 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.042324 4816 util.go:30] "No sandbox for pod can be found. 
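
The csi_attacher.go entry above ("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...") records the CSI staging gate: kubevirt.io.hostpath-provisioner does not advertise the STAGE_UNSTAGE_VOLUME node capability, so the kubelet skips NodeStageVolume against the globalmount path and goes straight to the per-pod publish, while still marking MountDevice as succeeded. A sketch of that gate, with hypothetical types:

    package main

    import "fmt"

    type nodeCapability string

    const stageUnstage nodeCapability = "STAGE_UNSTAGE_VOLUME"

    // mountDevice only stages the volume when the driver advertised the
    // capability; otherwise staging is a no-op that still reports success.
    func mountDevice(caps map[nodeCapability]bool) {
        if !caps[stageUnstage] {
            fmt.Println("capability not set, skipping MountDevice (NodeStageVolume)")
            return
        }
        fmt.Println("calling NodeStageVolume against the global mount path")
    }

    func main() { mountDevice(map[nodeCapability]bool{}) } // hostpath driver: no caps
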
Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.048986 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.175342 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpjkq\" (UniqueName: \"kubernetes.io/projected/cd5c0c5b-ed89-4552-a556-973c6fef61e7-kube-api-access-rpjkq\") pod \"mariadb-client\" (UID: \"cd5c0c5b-ed89-4552-a556-973c6fef61e7\") " pod="openstack/mariadb-client" Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.277248 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpjkq\" (UniqueName: \"kubernetes.io/projected/cd5c0c5b-ed89-4552-a556-973c6fef61e7-kube-api-access-rpjkq\") pod \"mariadb-client\" (UID: \"cd5c0c5b-ed89-4552-a556-973c6fef61e7\") " pod="openstack/mariadb-client" Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.301340 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpjkq\" (UniqueName: \"kubernetes.io/projected/cd5c0c5b-ed89-4552-a556-973c6fef61e7-kube-api-access-rpjkq\") pod \"mariadb-client\" (UID: \"cd5c0c5b-ed89-4552-a556-973c6fef61e7\") " pod="openstack/mariadb-client" Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.370568 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:26:45 crc kubenswrapper[4816]: I0216 14:26:45.810443 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:46 crc kubenswrapper[4816]: I0216 14:26:46.447075 4816 generic.go:334] "Generic (PLEG): container finished" podID="cd5c0c5b-ed89-4552-a556-973c6fef61e7" containerID="3f6e033f6b916fe2be86734750d3e3adb800d0322bdf69273151346bc631866b" exitCode=0 Feb 16 14:26:46 crc kubenswrapper[4816]: I0216 14:26:46.447181 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"cd5c0c5b-ed89-4552-a556-973c6fef61e7","Type":"ContainerDied","Data":"3f6e033f6b916fe2be86734750d3e3adb800d0322bdf69273151346bc631866b"} Feb 16 14:26:46 crc kubenswrapper[4816]: I0216 14:26:46.447361 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"cd5c0c5b-ed89-4552-a556-973c6fef61e7","Type":"ContainerStarted","Data":"61ccd940ef65790f0d7ee797c23aacf0305f1a19b3d633c713d6297d5a713c72"} Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.753897 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.777910 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_cd5c0c5b-ed89-4552-a556-973c6fef61e7/mariadb-client/0.log" Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.805600 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.813396 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.946079 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpjkq\" (UniqueName: \"kubernetes.io/projected/cd5c0c5b-ed89-4552-a556-973c6fef61e7-kube-api-access-rpjkq\") pod \"cd5c0c5b-ed89-4552-a556-973c6fef61e7\" (UID: \"cd5c0c5b-ed89-4552-a556-973c6fef61e7\") " Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.954981 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd5c0c5b-ed89-4552-a556-973c6fef61e7-kube-api-access-rpjkq" (OuterVolumeSpecName: "kube-api-access-rpjkq") pod "cd5c0c5b-ed89-4552-a556-973c6fef61e7" (UID: "cd5c0c5b-ed89-4552-a556-973c6fef61e7"). InnerVolumeSpecName "kube-api-access-rpjkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.981184 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:47 crc kubenswrapper[4816]: E0216 14:26:47.981719 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd5c0c5b-ed89-4552-a556-973c6fef61e7" containerName="mariadb-client" Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.981749 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd5c0c5b-ed89-4552-a556-973c6fef61e7" containerName="mariadb-client" Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.981925 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd5c0c5b-ed89-4552-a556-973c6fef61e7" containerName="mariadb-client" Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.982566 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:26:47 crc kubenswrapper[4816]: I0216 14:26:47.996866 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.047556 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpjkq\" (UniqueName: \"kubernetes.io/projected/cd5c0c5b-ed89-4552-a556-973c6fef61e7-kube-api-access-rpjkq\") on node \"crc\" DevicePath \"\"" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.149282 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz9rr\" (UniqueName: \"kubernetes.io/projected/80b89765-91c0-4d0b-8c04-8fbb7ddc61d2-kube-api-access-jz9rr\") pod \"mariadb-client\" (UID: \"80b89765-91c0-4d0b-8c04-8fbb7ddc61d2\") " pod="openstack/mariadb-client" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.250333 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz9rr\" (UniqueName: \"kubernetes.io/projected/80b89765-91c0-4d0b-8c04-8fbb7ddc61d2-kube-api-access-jz9rr\") pod \"mariadb-client\" (UID: \"80b89765-91c0-4d0b-8c04-8fbb7ddc61d2\") " pod="openstack/mariadb-client" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.266028 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz9rr\" (UniqueName: \"kubernetes.io/projected/80b89765-91c0-4d0b-8c04-8fbb7ddc61d2-kube-api-access-jz9rr\") pod \"mariadb-client\" (UID: \"80b89765-91c0-4d0b-8c04-8fbb7ddc61d2\") " pod="openstack/mariadb-client" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.303768 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.464712 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61ccd940ef65790f0d7ee797c23aacf0305f1a19b3d633c713d6297d5a713c72" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.464982 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.481326 4816 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="cd5c0c5b-ed89-4552-a556-973c6fef61e7" podUID="80b89765-91c0-4d0b-8c04-8fbb7ddc61d2" Feb 16 14:26:48 crc kubenswrapper[4816]: I0216 14:26:48.510166 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:49 crc kubenswrapper[4816]: I0216 14:26:49.411027 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd5c0c5b-ed89-4552-a556-973c6fef61e7" path="/var/lib/kubelet/pods/cd5c0c5b-ed89-4552-a556-973c6fef61e7/volumes" Feb 16 14:26:49 crc kubenswrapper[4816]: I0216 14:26:49.474970 4816 generic.go:334] "Generic (PLEG): container finished" podID="80b89765-91c0-4d0b-8c04-8fbb7ddc61d2" containerID="5be9c10bddbc0029710169621e737bdc2f025bc072b7d1d927f35f202bfb7d14" exitCode=0 Feb 16 14:26:49 crc kubenswrapper[4816]: I0216 14:26:49.475033 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"80b89765-91c0-4d0b-8c04-8fbb7ddc61d2","Type":"ContainerDied","Data":"5be9c10bddbc0029710169621e737bdc2f025bc072b7d1d927f35f202bfb7d14"} Feb 16 14:26:49 crc kubenswrapper[4816]: I0216 14:26:49.475108 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"80b89765-91c0-4d0b-8c04-8fbb7ddc61d2","Type":"ContainerStarted","Data":"69cc03d132d4b9fcb64b448b6178693d010dd3b7c09d13fb6814cc4dd9eff9dd"} Feb 16 14:26:50 crc kubenswrapper[4816]: I0216 14:26:50.780976 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:26:50 crc kubenswrapper[4816]: I0216 14:26:50.801506 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_80b89765-91c0-4d0b-8c04-8fbb7ddc61d2/mariadb-client/0.log" Feb 16 14:26:50 crc kubenswrapper[4816]: I0216 14:26:50.828452 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:50 crc kubenswrapper[4816]: I0216 14:26:50.835439 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Feb 16 14:26:50 crc kubenswrapper[4816]: I0216 14:26:50.934488 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz9rr\" (UniqueName: \"kubernetes.io/projected/80b89765-91c0-4d0b-8c04-8fbb7ddc61d2-kube-api-access-jz9rr\") pod \"80b89765-91c0-4d0b-8c04-8fbb7ddc61d2\" (UID: \"80b89765-91c0-4d0b-8c04-8fbb7ddc61d2\") " Feb 16 14:26:50 crc kubenswrapper[4816]: I0216 14:26:50.948874 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b89765-91c0-4d0b-8c04-8fbb7ddc61d2-kube-api-access-jz9rr" (OuterVolumeSpecName: "kube-api-access-jz9rr") pod "80b89765-91c0-4d0b-8c04-8fbb7ddc61d2" (UID: "80b89765-91c0-4d0b-8c04-8fbb7ddc61d2"). InnerVolumeSpecName "kube-api-access-jz9rr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:26:51 crc kubenswrapper[4816]: I0216 14:26:51.036628 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz9rr\" (UniqueName: \"kubernetes.io/projected/80b89765-91c0-4d0b-8c04-8fbb7ddc61d2-kube-api-access-jz9rr\") on node \"crc\" DevicePath \"\"" Feb 16 14:26:51 crc kubenswrapper[4816]: I0216 14:26:51.407778 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b89765-91c0-4d0b-8c04-8fbb7ddc61d2" path="/var/lib/kubelet/pods/80b89765-91c0-4d0b-8c04-8fbb7ddc61d2/volumes" Feb 16 14:26:51 crc kubenswrapper[4816]: I0216 14:26:51.491562 4816 scope.go:117] "RemoveContainer" containerID="5be9c10bddbc0029710169621e737bdc2f025bc072b7d1d927f35f202bfb7d14" Feb 16 14:26:51 crc kubenswrapper[4816]: I0216 14:26:51.491610 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.157089 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kdl4c"] Feb 16 14:27:02 crc kubenswrapper[4816]: E0216 14:27:02.157910 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b89765-91c0-4d0b-8c04-8fbb7ddc61d2" containerName="mariadb-client" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.157921 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b89765-91c0-4d0b-8c04-8fbb7ddc61d2" containerName="mariadb-client" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.158067 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b89765-91c0-4d0b-8c04-8fbb7ddc61d2" containerName="mariadb-client" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.159384 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.188529 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdl4c"] Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.205086 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk7ds\" (UniqueName: \"kubernetes.io/projected/b08ad524-8e6f-4729-98fe-d8ab42024727-kube-api-access-vk7ds\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.205197 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-utilities\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.205268 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-catalog-content\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.306313 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-catalog-content\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.306420 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk7ds\" (UniqueName: \"kubernetes.io/projected/b08ad524-8e6f-4729-98fe-d8ab42024727-kube-api-access-vk7ds\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.306455 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-utilities\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.306946 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-catalog-content\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.307001 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-utilities\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.343506 4816 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vk7ds\" (UniqueName: \"kubernetes.io/projected/b08ad524-8e6f-4729-98fe-d8ab42024727-kube-api-access-vk7ds\") pod \"redhat-marketplace-kdl4c\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") " pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.479210 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdl4c" Feb 16 14:27:02 crc kubenswrapper[4816]: I0216 14:27:02.732761 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdl4c"] Feb 16 14:27:03 crc kubenswrapper[4816]: I0216 14:27:03.597147 4816 generic.go:334] "Generic (PLEG): container finished" podID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerID="63a6216ae9a2ad87b2396d33610ddf4025e76e857422d22df98358a70156c7a4" exitCode=0 Feb 16 14:27:03 crc kubenswrapper[4816]: I0216 14:27:03.597656 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdl4c" event={"ID":"b08ad524-8e6f-4729-98fe-d8ab42024727","Type":"ContainerDied","Data":"63a6216ae9a2ad87b2396d33610ddf4025e76e857422d22df98358a70156c7a4"} Feb 16 14:27:03 crc kubenswrapper[4816]: I0216 14:27:03.597710 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdl4c" event={"ID":"b08ad524-8e6f-4729-98fe-d8ab42024727","Type":"ContainerStarted","Data":"a8ebffcd8f7929c22443c8eab08b8bc8c7100a085c4370560a906130d5bb22a8"} Feb 16 14:27:05 crc kubenswrapper[4816]: I0216 14:27:05.613209 4816 generic.go:334] "Generic (PLEG): container finished" podID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerID="9c4bed88a7d8b0c266313a96e2f8178a6877276659125316923cbc38a6e1e2b8" exitCode=0 Feb 16 14:27:05 crc kubenswrapper[4816]: I0216 14:27:05.613307 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdl4c" event={"ID":"b08ad524-8e6f-4729-98fe-d8ab42024727","Type":"ContainerDied","Data":"9c4bed88a7d8b0c266313a96e2f8178a6877276659125316923cbc38a6e1e2b8"} Feb 16 14:27:06 crc kubenswrapper[4816]: I0216 14:27:06.623527 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdl4c" event={"ID":"b08ad524-8e6f-4729-98fe-d8ab42024727","Type":"ContainerStarted","Data":"d8262534727a5b1c5204e65b128d73bb8ea775346a34e9ed18904be3eae5f5c6"} Feb 16 14:27:06 crc kubenswrapper[4816]: I0216 14:27:06.652395 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kdl4c" podStartSLOduration=2.205123124 podStartE2EDuration="4.652369105s" podCreationTimestamp="2026-02-16 14:27:02 +0000 UTC" firstStartedPulling="2026-02-16 14:27:03.5993084 +0000 UTC m=+5022.926022128" lastFinishedPulling="2026-02-16 14:27:06.046554331 +0000 UTC m=+5025.373268109" observedRunningTime="2026-02-16 14:27:06.648995773 +0000 UTC m=+5025.975709501" watchObservedRunningTime="2026-02-16 14:27:06.652369105 +0000 UTC m=+5025.979082863" Feb 16 14:27:06 crc kubenswrapper[4816]: I0216 14:27:06.940599 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:27:06 crc kubenswrapper[4816]: I0216 14:27:06.940692 4816 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.337391 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-55kz7"] Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.339386 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.357357 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-55kz7"] Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.538688 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-utilities\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.538811 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-catalog-content\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.538858 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlkxm\" (UniqueName: \"kubernetes.io/projected/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-kube-api-access-dlkxm\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.640334 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-utilities\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.641202 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-utilities\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.641458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-catalog-content\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.641939 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-catalog-content\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7" 
Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.642113 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlkxm\" (UniqueName: \"kubernetes.io/projected/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-kube-api-access-dlkxm\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7"
Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.664850 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlkxm\" (UniqueName: \"kubernetes.io/projected/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-kube-api-access-dlkxm\") pod \"redhat-operators-55kz7\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " pod="openshift-marketplace/redhat-operators-55kz7"
Feb 16 14:27:08 crc kubenswrapper[4816]: I0216 14:27:08.959569 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-55kz7"
Feb 16 14:27:09 crc kubenswrapper[4816]: I0216 14:27:09.449889 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-55kz7"]
Feb 16 14:27:09 crc kubenswrapper[4816]: W0216 14:27:09.464776 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd54e1b2_d81c_4602_a8b5_3dbe898093ed.slice/crio-4d25b72707e7cc65065d020e956c04ee51249ca42834f26bef350c8b20d5b38d WatchSource:0}: Error finding container 4d25b72707e7cc65065d020e956c04ee51249ca42834f26bef350c8b20d5b38d: Status 404 returned error can't find the container with id 4d25b72707e7cc65065d020e956c04ee51249ca42834f26bef350c8b20d5b38d
Feb 16 14:27:09 crc kubenswrapper[4816]: I0216 14:27:09.643985 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-55kz7" event={"ID":"bd54e1b2-d81c-4602-a8b5-3dbe898093ed","Type":"ContainerStarted","Data":"4d25b72707e7cc65065d020e956c04ee51249ca42834f26bef350c8b20d5b38d"}
Feb 16 14:27:10 crc kubenswrapper[4816]: I0216 14:27:10.655437 4816 generic.go:334] "Generic (PLEG): container finished" podID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerID="a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197" exitCode=0
Feb 16 14:27:10 crc kubenswrapper[4816]: I0216 14:27:10.655541 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-55kz7" event={"ID":"bd54e1b2-d81c-4602-a8b5-3dbe898093ed","Type":"ContainerDied","Data":"a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197"}
Feb 16 14:27:11 crc kubenswrapper[4816]: I0216 14:27:11.663992 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-55kz7" event={"ID":"bd54e1b2-d81c-4602-a8b5-3dbe898093ed","Type":"ContainerStarted","Data":"422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af"}
Feb 16 14:27:12 crc kubenswrapper[4816]: I0216 14:27:12.480246 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kdl4c"
Feb 16 14:27:12 crc kubenswrapper[4816]: I0216 14:27:12.480943 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kdl4c"
Feb 16 14:27:12 crc kubenswrapper[4816]: I0216 14:27:12.552457 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kdl4c"
Feb 16 14:27:12 crc kubenswrapper[4816]: I0216 14:27:12.673448 4816 generic.go:334] "Generic (PLEG): container finished" podID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerID="422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af" exitCode=0
Feb 16 14:27:12 crc kubenswrapper[4816]: I0216 14:27:12.674673 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-55kz7" event={"ID":"bd54e1b2-d81c-4602-a8b5-3dbe898093ed","Type":"ContainerDied","Data":"422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af"}
Feb 16 14:27:12 crc kubenswrapper[4816]: I0216 14:27:12.720569 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kdl4c"
Feb 16 14:27:13 crc kubenswrapper[4816]: I0216 14:27:13.680919 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-55kz7" event={"ID":"bd54e1b2-d81c-4602-a8b5-3dbe898093ed","Type":"ContainerStarted","Data":"54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d"}
Feb 16 14:27:13 crc kubenswrapper[4816]: I0216 14:27:13.705051 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-55kz7" podStartSLOduration=3.272318667 podStartE2EDuration="5.705028652s" podCreationTimestamp="2026-02-16 14:27:08 +0000 UTC" firstStartedPulling="2026-02-16 14:27:10.657530108 +0000 UTC m=+5029.984243836" lastFinishedPulling="2026-02-16 14:27:13.090240093 +0000 UTC m=+5032.416953821" observedRunningTime="2026-02-16 14:27:13.700133839 +0000 UTC m=+5033.026847567" watchObservedRunningTime="2026-02-16 14:27:13.705028652 +0000 UTC m=+5033.031742390"
Feb 16 14:27:14 crc kubenswrapper[4816]: I0216 14:27:14.927746 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdl4c"]
Feb 16 14:27:14 crc kubenswrapper[4816]: I0216 14:27:14.928283 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kdl4c" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="registry-server" containerID="cri-o://d8262534727a5b1c5204e65b128d73bb8ea775346a34e9ed18904be3eae5f5c6" gracePeriod=2
Feb 16 14:27:15 crc kubenswrapper[4816]: I0216 14:27:15.698247 4816 generic.go:334] "Generic (PLEG): container finished" podID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerID="d8262534727a5b1c5204e65b128d73bb8ea775346a34e9ed18904be3eae5f5c6" exitCode=0
Feb 16 14:27:15 crc kubenswrapper[4816]: I0216 14:27:15.698282 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdl4c" event={"ID":"b08ad524-8e6f-4729-98fe-d8ab42024727","Type":"ContainerDied","Data":"d8262534727a5b1c5204e65b128d73bb8ea775346a34e9ed18904be3eae5f5c6"}
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.412812 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdl4c"
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.460964 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-utilities\") pod \"b08ad524-8e6f-4729-98fe-d8ab42024727\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") "
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.461016 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk7ds\" (UniqueName: \"kubernetes.io/projected/b08ad524-8e6f-4729-98fe-d8ab42024727-kube-api-access-vk7ds\") pod \"b08ad524-8e6f-4729-98fe-d8ab42024727\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") "
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.461043 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-catalog-content\") pod \"b08ad524-8e6f-4729-98fe-d8ab42024727\" (UID: \"b08ad524-8e6f-4729-98fe-d8ab42024727\") "
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.461719 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-utilities" (OuterVolumeSpecName: "utilities") pod "b08ad524-8e6f-4729-98fe-d8ab42024727" (UID: "b08ad524-8e6f-4729-98fe-d8ab42024727"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.488048 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b08ad524-8e6f-4729-98fe-d8ab42024727-kube-api-access-vk7ds" (OuterVolumeSpecName: "kube-api-access-vk7ds") pod "b08ad524-8e6f-4729-98fe-d8ab42024727" (UID: "b08ad524-8e6f-4729-98fe-d8ab42024727"). InnerVolumeSpecName "kube-api-access-vk7ds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.488981 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b08ad524-8e6f-4729-98fe-d8ab42024727" (UID: "b08ad524-8e6f-4729-98fe-d8ab42024727"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.563006 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.563052 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk7ds\" (UniqueName: \"kubernetes.io/projected/b08ad524-8e6f-4729-98fe-d8ab42024727-kube-api-access-vk7ds\") on node \"crc\" DevicePath \"\""
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.563063 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08ad524-8e6f-4729-98fe-d8ab42024727-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.706775 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kdl4c" event={"ID":"b08ad524-8e6f-4729-98fe-d8ab42024727","Type":"ContainerDied","Data":"a8ebffcd8f7929c22443c8eab08b8bc8c7100a085c4370560a906130d5bb22a8"}
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.706824 4816 scope.go:117] "RemoveContainer" containerID="d8262534727a5b1c5204e65b128d73bb8ea775346a34e9ed18904be3eae5f5c6"
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.706951 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kdl4c"
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.729120 4816 scope.go:117] "RemoveContainer" containerID="9c4bed88a7d8b0c266313a96e2f8178a6877276659125316923cbc38a6e1e2b8"
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.748475 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdl4c"]
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.757376 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kdl4c"]
Feb 16 14:27:16 crc kubenswrapper[4816]: I0216 14:27:16.772093 4816 scope.go:117] "RemoveContainer" containerID="63a6216ae9a2ad87b2396d33610ddf4025e76e857422d22df98358a70156c7a4"
Feb 16 14:27:17 crc kubenswrapper[4816]: I0216 14:27:17.410722 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" path="/var/lib/kubelet/pods/b08ad524-8e6f-4729-98fe-d8ab42024727/volumes"
Feb 16 14:27:18 crc kubenswrapper[4816]: I0216 14:27:18.960978 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-55kz7"
Feb 16 14:27:18 crc kubenswrapper[4816]: I0216 14:27:18.961405 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-55kz7"
Feb 16 14:27:20 crc kubenswrapper[4816]: I0216 14:27:20.001804 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-55kz7" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="registry-server" probeResult="failure" output=<
Feb 16 14:27:20 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s
Feb 16 14:27:20 crc kubenswrapper[4816]: >
Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.635917 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 16 14:27:24 crc kubenswrapper[4816]: E0216 14:27:24.636721 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="registry-server"
"RemoveStaleState: removing container" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="registry-server" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.636773 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="registry-server" Feb 16 14:27:24 crc kubenswrapper[4816]: E0216 14:27:24.636781 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="extract-content" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.636787 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="extract-content" Feb 16 14:27:24 crc kubenswrapper[4816]: E0216 14:27:24.636805 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="extract-utilities" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.636811 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="extract-utilities" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.637733 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b08ad524-8e6f-4729-98fe-d8ab42024727" containerName="registry-server" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.639005 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.640870 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.641334 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-smxvt" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.649038 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.656212 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.657690 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.658292 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.671887 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.673645 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.674976 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05759484-3ff6-46f6-9aa8-7080fbcdaed2-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.675057 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.675101 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/05759484-3ff6-46f6-9aa8-7080fbcdaed2-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.675168 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05759484-3ff6-46f6-9aa8-7080fbcdaed2-config\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.675209 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05759484-3ff6-46f6-9aa8-7080fbcdaed2-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.675255 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlxzv\" (UniqueName: \"kubernetes.io/projected/05759484-3ff6-46f6-9aa8-7080fbcdaed2-kube-api-access-zlxzv\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.679212 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.691221 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.776679 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m65ln\" (UniqueName: \"kubernetes.io/projected/1662531d-5c69-4b0f-a95b-008fb425954c-kube-api-access-m65ln\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.776735 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc 
kubenswrapper[4816]: I0216 14:27:24.776854 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05759484-3ff6-46f6-9aa8-7080fbcdaed2-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.777846 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.777929 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/05759484-3ff6-46f6-9aa8-7080fbcdaed2-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.777952 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1662531d-5c69-4b0f-a95b-008fb425954c-config\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778046 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1662531d-5c69-4b0f-a95b-008fb425954c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778087 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05759484-3ff6-46f6-9aa8-7080fbcdaed2-config\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778129 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05759484-3ff6-46f6-9aa8-7080fbcdaed2-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778191 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1662531d-5c69-4b0f-a95b-008fb425954c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778275 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1662531d-5c69-4b0f-a95b-008fb425954c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778323 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlxzv\" (UniqueName: 
\"kubernetes.io/projected/05759484-3ff6-46f6-9aa8-7080fbcdaed2-kube-api-access-zlxzv\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778390 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/05759484-3ff6-46f6-9aa8-7080fbcdaed2-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778726 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05759484-3ff6-46f6-9aa8-7080fbcdaed2-config\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.778785 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05759484-3ff6-46f6-9aa8-7080fbcdaed2-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.781965 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.782023 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a7138b8b1b4e5e5931050c4292062f87a3a3524eecbd748bbf155408df3077e4/globalmount\"" pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.785087 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05759484-3ff6-46f6-9aa8-7080fbcdaed2-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.799751 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlxzv\" (UniqueName: \"kubernetes.io/projected/05759484-3ff6-46f6-9aa8-7080fbcdaed2-kube-api-access-zlxzv\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.833847 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.835495 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.843231 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.843972 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.846198 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-bwnw2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.846752 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7bafa15-0693-4347-a1e9-443721a05d9c\") pod \"ovsdbserver-nb-1\" (UID: \"05759484-3ff6-46f6-9aa8-7080fbcdaed2\") " pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.856866 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.867162 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.873314 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.879698 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0f55e11-6d2f-4148-af12-86568fca039d-config\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.879766 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0f55e11-6d2f-4148-af12-86568fca039d-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.879798 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v6hq\" (UniqueName: \"kubernetes.io/projected/b0f55e11-6d2f-4148-af12-86568fca039d-kube-api-access-5v6hq\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.879874 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.879919 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1662531d-5c69-4b0f-a95b-008fb425954c-config\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.879950 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f55e11-6d2f-4148-af12-86568fca039d-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.879990 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0f55e11-6d2f-4148-af12-86568fca039d-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.880075 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1662531d-5c69-4b0f-a95b-008fb425954c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.880158 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1662531d-5c69-4b0f-a95b-008fb425954c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.880199 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1662531d-5c69-4b0f-a95b-008fb425954c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.880263 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m65ln\" (UniqueName: \"kubernetes.io/projected/1662531d-5c69-4b0f-a95b-008fb425954c-kube-api-access-m65ln\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.880308 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.881637 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1662531d-5c69-4b0f-a95b-008fb425954c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.882006 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1662531d-5c69-4b0f-a95b-008fb425954c-config\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.883058 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1662531d-5c69-4b0f-a95b-008fb425954c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 
14:27:24.884945 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.884987 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b857faf1e753fc29dd208c5d74d98775c864d6a9c158653fb2a06af8910ea436/globalmount\"" pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.900009 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1662531d-5c69-4b0f-a95b-008fb425954c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.908840 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.910332 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.915490 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m65ln\" (UniqueName: \"kubernetes.io/projected/1662531d-5c69-4b0f-a95b-008fb425954c-kube-api-access-m65ln\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.917879 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.924975 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae5c0e69-71f0-45c4-80d9-4982eef9d9c4\") pod \"ovsdbserver-nb-0\" (UID: \"1662531d-5c69-4b0f-a95b-008fb425954c\") " pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.927590 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.971233 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982299 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3d5a108c-cf1e-4484-be11-3f5cb31607df-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982461 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0f55e11-6d2f-4148-af12-86568fca039d-config\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982523 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0f55e11-6d2f-4148-af12-86568fca039d-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982543 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v6hq\" (UniqueName: \"kubernetes.io/projected/b0f55e11-6d2f-4148-af12-86568fca039d-kube-api-access-5v6hq\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982598 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982619 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0882ccc-80ad-4170-92d6-bf10c5c45980-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982674 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjwx6\" (UniqueName: \"kubernetes.io/projected/3d5a108c-cf1e-4484-be11-3f5cb31607df-kube-api-access-vjwx6\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982702 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0882ccc-80ad-4170-92d6-bf10c5c45980-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982764 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d0882ccc-80ad-4170-92d6-bf10c5c45980-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982828 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f55e11-6d2f-4148-af12-86568fca039d-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982856 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5a108c-cf1e-4484-be11-3f5cb31607df-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982906 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0f55e11-6d2f-4148-af12-86568fca039d-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.982988 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwnzd\" (UniqueName: \"kubernetes.io/projected/d0882ccc-80ad-4170-92d6-bf10c5c45980-kube-api-access-dwnzd\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983028 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0882ccc-80ad-4170-92d6-bf10c5c45980-config\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983078 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983105 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5a108c-cf1e-4484-be11-3f5cb31607df-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983205 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983230 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0f55e11-6d2f-4148-af12-86568fca039d-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983256 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/3d5a108c-cf1e-4484-be11-3f5cb31607df-config\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983589 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0f55e11-6d2f-4148-af12-86568fca039d-config\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.983988 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.987353 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.987388 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5b74a383aa0560526e0b48c1689d0325651a56d9ce815a243feaca24e1c6e3cf/globalmount\"" pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.987481 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0f55e11-6d2f-4148-af12-86568fca039d-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:24 crc kubenswrapper[4816]: I0216 14:27:24.988448 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f55e11-6d2f-4148-af12-86568fca039d-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.003828 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v6hq\" (UniqueName: \"kubernetes.io/projected/b0f55e11-6d2f-4148-af12-86568fca039d-kube-api-access-5v6hq\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.017857 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff9f27e0-7261-4629-8f11-6d81ac110488\") pod \"ovsdbserver-nb-2\" (UID: \"b0f55e11-6d2f-4148-af12-86568fca039d\") " pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084557 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3d5a108c-cf1e-4484-be11-3f5cb31607df-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084627 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/2cea967e-94b6-4e50-bab8-e354ae2c67cf-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084669 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkd5r\" (UniqueName: \"kubernetes.io/projected/2cea967e-94b6-4e50-bab8-e354ae2c67cf-kube-api-access-hkd5r\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084691 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cea967e-94b6-4e50-bab8-e354ae2c67cf-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084713 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0882ccc-80ad-4170-92d6-bf10c5c45980-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084731 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-782a9916-589b-46ae-97f3-3818af675473\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-782a9916-589b-46ae-97f3-3818af675473\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084751 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjwx6\" (UniqueName: \"kubernetes.io/projected/3d5a108c-cf1e-4484-be11-3f5cb31607df-kube-api-access-vjwx6\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084780 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0882ccc-80ad-4170-92d6-bf10c5c45980-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084799 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d0882ccc-80ad-4170-92d6-bf10c5c45980-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084818 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5a108c-cf1e-4484-be11-3f5cb31607df-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084854 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cea967e-94b6-4e50-bab8-e354ae2c67cf-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " 
pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084881 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwnzd\" (UniqueName: \"kubernetes.io/projected/d0882ccc-80ad-4170-92d6-bf10c5c45980-kube-api-access-dwnzd\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084897 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cea967e-94b6-4e50-bab8-e354ae2c67cf-config\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084915 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0882ccc-80ad-4170-92d6-bf10c5c45980-config\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084931 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084946 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5a108c-cf1e-4484-be11-3f5cb31607df-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084981 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.084999 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d5a108c-cf1e-4484-be11-3f5cb31607df-config\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.085215 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3d5a108c-cf1e-4484-be11-3f5cb31607df-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.085974 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d5a108c-cf1e-4484-be11-3f5cb31607df-config\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.086069 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/d0882ccc-80ad-4170-92d6-bf10c5c45980-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.086376 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0882ccc-80ad-4170-92d6-bf10c5c45980-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.088125 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5a108c-cf1e-4484-be11-3f5cb31607df-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.089502 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0882ccc-80ad-4170-92d6-bf10c5c45980-config\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.094304 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.094574 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/037cc7d3801cc835f899cc338ea62a45531e5040470bc769c65e93f63f1963c7/globalmount\"" pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.094478 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0882ccc-80ad-4170-92d6-bf10c5c45980-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.095593 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.095671 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f2a4545c099d517e04b2c03babadc2fa777eaa30e91c152c0353a333a35c8b4a/globalmount\"" pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.104417 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjwx6\" (UniqueName: \"kubernetes.io/projected/3d5a108c-cf1e-4484-be11-3f5cb31607df-kube-api-access-vjwx6\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.104455 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5a108c-cf1e-4484-be11-3f5cb31607df-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.110465 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwnzd\" (UniqueName: \"kubernetes.io/projected/d0882ccc-80ad-4170-92d6-bf10c5c45980-kube-api-access-dwnzd\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.147909 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7e3993ec-41de-4fe4-a584-9eaf8cc43f2c\") pod \"ovsdbserver-sb-0\" (UID: \"3d5a108c-cf1e-4484-be11-3f5cb31607df\") " pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.156430 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9f8ea2ad-0c93-4bd1-8e0c-b0fa7fd85beb\") pod \"ovsdbserver-sb-2\" (UID: \"d0882ccc-80ad-4170-92d6-bf10c5c45980\") " pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.182180 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.188014 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2cea967e-94b6-4e50-bab8-e354ae2c67cf-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.188077 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkd5r\" (UniqueName: \"kubernetes.io/projected/2cea967e-94b6-4e50-bab8-e354ae2c67cf-kube-api-access-hkd5r\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.188110 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cea967e-94b6-4e50-bab8-e354ae2c67cf-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.188151 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-782a9916-589b-46ae-97f3-3818af675473\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-782a9916-589b-46ae-97f3-3818af675473\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.188222 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cea967e-94b6-4e50-bab8-e354ae2c67cf-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.188265 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cea967e-94b6-4e50-bab8-e354ae2c67cf-config\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.188854 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2cea967e-94b6-4e50-bab8-e354ae2c67cf-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.189299 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cea967e-94b6-4e50-bab8-e354ae2c67cf-config\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.190915 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.190979 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-782a9916-589b-46ae-97f3-3818af675473\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-782a9916-589b-46ae-97f3-3818af675473\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f0b558185be3465c2a94525c11ac1dedc8be18997c8f54cbe10b2fb44fc99fe4/globalmount\"" pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.191533 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cea967e-94b6-4e50-bab8-e354ae2c67cf-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.199534 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cea967e-94b6-4e50-bab8-e354ae2c67cf-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.206028 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkd5r\" (UniqueName: \"kubernetes.io/projected/2cea967e-94b6-4e50-bab8-e354ae2c67cf-kube-api-access-hkd5r\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.226502 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-782a9916-589b-46ae-97f3-3818af675473\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-782a9916-589b-46ae-97f3-3818af675473\") pod \"ovsdbserver-sb-1\" (UID: \"2cea967e-94b6-4e50-bab8-e354ae2c67cf\") " pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.253623 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.262044 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.294598 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.518397 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Feb 16 14:27:25 crc kubenswrapper[4816]: W0216 14:27:25.521728 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05759484_3ff6_46f6_9aa8_7080fbcdaed2.slice/crio-065bb077caad2acbdf31ff87075623adcd7b793f63fc0799441f6061f972ecd6 WatchSource:0}: Error finding container 065bb077caad2acbdf31ff87075623adcd7b793f63fc0799441f6061f972ecd6: Status 404 returned error can't find the container with id 065bb077caad2acbdf31ff87075623adcd7b793f63fc0799441f6061f972ecd6 Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.614747 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 16 14:27:25 crc kubenswrapper[4816]: W0216 14:27:25.623299 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1662531d_5c69_4b0f_a95b_008fb425954c.slice/crio-a7a8b421ce55c2e43082f2c7feb06031b13f68bbe0c223209203513ab29adc75 WatchSource:0}: Error finding container a7a8b421ce55c2e43082f2c7feb06031b13f68bbe0c223209203513ab29adc75: Status 404 returned error can't find the container with id a7a8b421ce55c2e43082f2c7feb06031b13f68bbe0c223209203513ab29adc75 Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.704987 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 16 14:27:25 crc kubenswrapper[4816]: W0216 14:27:25.711145 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d5a108c_cf1e_4484_be11_3f5cb31607df.slice/crio-e5eae04718e80e11f2c3d55d580173fa6cb6c4f11d8d93a603bc178fcb3baf50 WatchSource:0}: Error finding container e5eae04718e80e11f2c3d55d580173fa6cb6c4f11d8d93a603bc178fcb3baf50: Status 404 returned error can't find the container with id e5eae04718e80e11f2c3d55d580173fa6cb6c4f11d8d93a603bc178fcb3baf50 Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.850928 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3d5a108c-cf1e-4484-be11-3f5cb31607df","Type":"ContainerStarted","Data":"e5eae04718e80e11f2c3d55d580173fa6cb6c4f11d8d93a603bc178fcb3baf50"} Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.853182 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.854464 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"05759484-3ff6-46f6-9aa8-7080fbcdaed2","Type":"ContainerStarted","Data":"0d7053d67970b48f854ed53b12a3b5268703c8535b8f00fa62fec0d516627723"} Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.854491 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"05759484-3ff6-46f6-9aa8-7080fbcdaed2","Type":"ContainerStarted","Data":"065bb077caad2acbdf31ff87075623adcd7b793f63fc0799441f6061f972ecd6"} Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 14:27:25.872173 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"1662531d-5c69-4b0f-a95b-008fb425954c","Type":"ContainerStarted","Data":"127dd560b02286906b9becd011a0955c9000d1d3a85d5431a7ef8f7cdf274141"} Feb 16 14:27:25 crc kubenswrapper[4816]: I0216 
14:27:25.872230 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"1662531d-5c69-4b0f-a95b-008fb425954c","Type":"ContainerStarted","Data":"a7a8b421ce55c2e43082f2c7feb06031b13f68bbe0c223209203513ab29adc75"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.592115 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Feb 16 14:27:26 crc kubenswrapper[4816]: W0216 14:27:26.594887 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0882ccc_80ad_4170_92d6_bf10c5c45980.slice/crio-e130f7b940bfa90514139f170b11ad069c4156a365abbaf09537f5ac3b1e5bfb WatchSource:0}: Error finding container e130f7b940bfa90514139f170b11ad069c4156a365abbaf09537f5ac3b1e5bfb: Status 404 returned error can't find the container with id e130f7b940bfa90514139f170b11ad069c4156a365abbaf09537f5ac3b1e5bfb Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.833875 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Feb 16 14:27:26 crc kubenswrapper[4816]: W0216 14:27:26.843004 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0f55e11_6d2f_4148_af12_86568fca039d.slice/crio-ef8f60ab7bcd83f962342068f9ac3d24781bb87870a42c73dd977a5dfb29c4c9 WatchSource:0}: Error finding container ef8f60ab7bcd83f962342068f9ac3d24781bb87870a42c73dd977a5dfb29c4c9: Status 404 returned error can't find the container with id ef8f60ab7bcd83f962342068f9ac3d24781bb87870a42c73dd977a5dfb29c4c9 Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.895732 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"2cea967e-94b6-4e50-bab8-e354ae2c67cf","Type":"ContainerStarted","Data":"6b1710787e3fb3792a2d3af71aa30402a59d6d30ddd73f5f9586223f384358e9"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.895800 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"2cea967e-94b6-4e50-bab8-e354ae2c67cf","Type":"ContainerStarted","Data":"7b0021557f5ef22c90ae4d0b781493c760787033f1b7842b3191680c294e3823"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.895813 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"2cea967e-94b6-4e50-bab8-e354ae2c67cf","Type":"ContainerStarted","Data":"1cad081ac44395dd967b342f53b39ea99842d462f74c867a9873eff68c03ecd8"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.897235 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"05759484-3ff6-46f6-9aa8-7080fbcdaed2","Type":"ContainerStarted","Data":"03050ebe73c3300f486f915821ae8b7ae3c6621c8d2dd17539859c1a20b60d22"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.899444 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"b0f55e11-6d2f-4148-af12-86568fca039d","Type":"ContainerStarted","Data":"ef8f60ab7bcd83f962342068f9ac3d24781bb87870a42c73dd977a5dfb29c4c9"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.901616 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"1662531d-5c69-4b0f-a95b-008fb425954c","Type":"ContainerStarted","Data":"5b6bd4fb88fc829569176c283490e84e7ee9793b5705282ccc25442338af44c9"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.904257 4816 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3d5a108c-cf1e-4484-be11-3f5cb31607df","Type":"ContainerStarted","Data":"ec1464dee57f50bfe9a4fb278efe20c43d6e878e395a0b13df9b710103d52882"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.904292 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"3d5a108c-cf1e-4484-be11-3f5cb31607df","Type":"ContainerStarted","Data":"829a6b35aa2e025f5b368b9560c418d74eb41b4dbfa8d489a84f63c0399f06fb"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.907927 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d0882ccc-80ad-4170-92d6-bf10c5c45980","Type":"ContainerStarted","Data":"c8c5e1e5f46ddacb05f30234c45fac3d411758eafed520eaeab29668fb61736d"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.907968 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d0882ccc-80ad-4170-92d6-bf10c5c45980","Type":"ContainerStarted","Data":"ca5023c1685fdb0ab77a855374b317265180d178e5db6bca505d8ee372b2f66a"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.907977 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d0882ccc-80ad-4170-92d6-bf10c5c45980","Type":"ContainerStarted","Data":"e130f7b940bfa90514139f170b11ad069c4156a365abbaf09537f5ac3b1e5bfb"} Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.918172 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.9181476379999998 podStartE2EDuration="3.918147638s" podCreationTimestamp="2026-02-16 14:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:26.918072656 +0000 UTC m=+5046.244786394" watchObservedRunningTime="2026-02-16 14:27:26.918147638 +0000 UTC m=+5046.244861376" Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.949737 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.949710588 podStartE2EDuration="3.949710588s" podCreationTimestamp="2026-02-16 14:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:26.936038965 +0000 UTC m=+5046.262752703" watchObservedRunningTime="2026-02-16 14:27:26.949710588 +0000 UTC m=+5046.276424316" Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.973077 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.973055016 podStartE2EDuration="3.973055016s" podCreationTimestamp="2026-02-16 14:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:26.962159708 +0000 UTC m=+5046.288873446" watchObservedRunningTime="2026-02-16 14:27:26.973055016 +0000 UTC m=+5046.299768744" Feb 16 14:27:26 crc kubenswrapper[4816]: I0216 14:27:26.992201 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.992181187 podStartE2EDuration="3.992181187s" podCreationTimestamp="2026-02-16 14:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:26.976114669 +0000 UTC 
m=+5046.302828417" watchObservedRunningTime="2026-02-16 14:27:26.992181187 +0000 UTC m=+5046.318894915" Feb 16 14:27:27 crc kubenswrapper[4816]: I0216 14:27:27.001879 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.001861641 podStartE2EDuration="4.001861641s" podCreationTimestamp="2026-02-16 14:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:26.994575642 +0000 UTC m=+5046.321289370" watchObservedRunningTime="2026-02-16 14:27:27.001861641 +0000 UTC m=+5046.328575369" Feb 16 14:27:27 crc kubenswrapper[4816]: I0216 14:27:27.214958 4816 scope.go:117] "RemoveContainer" containerID="1a07e5feff28041a7114033461c1b20ec881ef1f93bfa1c4b6e6a28d50259e01" Feb 16 14:27:27 crc kubenswrapper[4816]: I0216 14:27:27.922167 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"b0f55e11-6d2f-4148-af12-86568fca039d","Type":"ContainerStarted","Data":"dac041353c56bf7cec3a83be49aa7810b3a9684fc0933737dd45efd56c7e4fa4"} Feb 16 14:27:27 crc kubenswrapper[4816]: I0216 14:27:27.922536 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"b0f55e11-6d2f-4148-af12-86568fca039d","Type":"ContainerStarted","Data":"254bee1eb5b28747ec92b50905bf671911f9b001d4f695a9cfbbc5394f98274e"} Feb 16 14:27:27 crc kubenswrapper[4816]: I0216 14:27:27.946142 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.946115697 podStartE2EDuration="4.946115697s" podCreationTimestamp="2026-02-16 14:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:27.939709862 +0000 UTC m=+5047.266423650" watchObservedRunningTime="2026-02-16 14:27:27.946115697 +0000 UTC m=+5047.272829455" Feb 16 14:27:27 crc kubenswrapper[4816]: I0216 14:27:27.971995 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:27 crc kubenswrapper[4816]: I0216 14:27:27.984398 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:28 crc kubenswrapper[4816]: I0216 14:27:28.182517 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:28 crc kubenswrapper[4816]: I0216 14:27:28.254481 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:28 crc kubenswrapper[4816]: I0216 14:27:28.262531 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:28 crc kubenswrapper[4816]: I0216 14:27:28.294833 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:29 crc kubenswrapper[4816]: I0216 14:27:29.020166 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:29 crc kubenswrapper[4816]: I0216 14:27:29.070506 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:29 crc kubenswrapper[4816]: I0216 14:27:29.253804 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-55kz7"] Feb 16 14:27:29 crc kubenswrapper[4816]: I0216 14:27:29.971692 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:29 crc kubenswrapper[4816]: I0216 14:27:29.984554 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:30 crc kubenswrapper[4816]: I0216 14:27:30.183305 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:30 crc kubenswrapper[4816]: I0216 14:27:30.254512 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:30 crc kubenswrapper[4816]: I0216 14:27:30.262629 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:30 crc kubenswrapper[4816]: I0216 14:27:30.295631 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:30 crc kubenswrapper[4816]: I0216 14:27:30.950648 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-55kz7" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="registry-server" containerID="cri-o://54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d" gracePeriod=2 Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.022791 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.029208 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.068881 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.075389 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.269698 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.322107 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.341186 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.358062 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9f466ffc7-55pmg"] Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.361494 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.365512 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.377805 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.398974 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9f466ffc7-55pmg"] Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.427701 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.427747 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.432564 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.508514 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-ovsdbserver-nb\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.508675 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-config\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.509569 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsl45\" (UniqueName: \"kubernetes.io/projected/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-kube-api-access-nsl45\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.509754 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-dns-svc\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.536087 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.611642 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-config\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.611784 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsl45\" (UniqueName: \"kubernetes.io/projected/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-kube-api-access-nsl45\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.611835 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-dns-svc\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.611896 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-ovsdbserver-nb\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.612920 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-dns-svc\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.612947 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-ovsdbserver-nb\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.613121 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-config\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.648980 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsl45\" (UniqueName: \"kubernetes.io/projected/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-kube-api-access-nsl45\") pod \"dnsmasq-dns-9f466ffc7-55pmg\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.686566 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9f466ffc7-55pmg"] Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.687265 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.713390 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-utilities\") pod \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.713545 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-catalog-content\") pod \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.713681 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlkxm\" (UniqueName: \"kubernetes.io/projected/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-kube-api-access-dlkxm\") pod \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\" (UID: \"bd54e1b2-d81c-4602-a8b5-3dbe898093ed\") " Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.714648 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-utilities" (OuterVolumeSpecName: "utilities") pod "bd54e1b2-d81c-4602-a8b5-3dbe898093ed" (UID: "bd54e1b2-d81c-4602-a8b5-3dbe898093ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.721134 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"] Feb 16 14:27:31 crc kubenswrapper[4816]: E0216 14:27:31.721559 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="extract-content" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.721583 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="extract-content" Feb 16 14:27:31 crc kubenswrapper[4816]: E0216 14:27:31.721608 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="extract-utilities" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.721617 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="extract-utilities" Feb 16 14:27:31 crc kubenswrapper[4816]: E0216 14:27:31.721673 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="registry-server" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.721684 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="registry-server" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.721866 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerName="registry-server" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.722945 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.723938 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-kube-api-access-dlkxm" (OuterVolumeSpecName: "kube-api-access-dlkxm") pod "bd54e1b2-d81c-4602-a8b5-3dbe898093ed" (UID: "bd54e1b2-d81c-4602-a8b5-3dbe898093ed"). InnerVolumeSpecName "kube-api-access-dlkxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.726251 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.751097 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"] Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.815078 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.815339 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.815375 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-config\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.815392 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9jsm\" (UniqueName: \"kubernetes.io/projected/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-kube-api-access-m9jsm\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.815431 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-dns-svc\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.815503 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlkxm\" (UniqueName: \"kubernetes.io/projected/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-kube-api-access-dlkxm\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.815515 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.871484 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd54e1b2-d81c-4602-a8b5-3dbe898093ed" (UID: "bd54e1b2-d81c-4602-a8b5-3dbe898093ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.917639 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.917719 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-config\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.917740 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9jsm\" (UniqueName: \"kubernetes.io/projected/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-kube-api-access-m9jsm\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.917779 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-dns-svc\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.917835 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.917892 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd54e1b2-d81c-4602-a8b5-3dbe898093ed-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.918779 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-sb\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.919428 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-config\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.919576 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-dns-svc\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: 
\"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.920353 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-nb\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.935901 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9jsm\" (UniqueName: \"kubernetes.io/projected/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-kube-api-access-m9jsm\") pod \"dnsmasq-dns-6c5d6d45cc-7dcf4\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") " pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.968201 4816 generic.go:334] "Generic (PLEG): container finished" podID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" containerID="54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d" exitCode=0 Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.969023 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-55kz7" event={"ID":"bd54e1b2-d81c-4602-a8b5-3dbe898093ed","Type":"ContainerDied","Data":"54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d"} Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.969096 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-55kz7" event={"ID":"bd54e1b2-d81c-4602-a8b5-3dbe898093ed","Type":"ContainerDied","Data":"4d25b72707e7cc65065d020e956c04ee51249ca42834f26bef350c8b20d5b38d"} Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.969107 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-55kz7" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.969125 4816 scope.go:117] "RemoveContainer" containerID="54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d" Feb 16 14:27:31 crc kubenswrapper[4816]: I0216 14:27:31.998756 4816 scope.go:117] "RemoveContainer" containerID="422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.005308 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-55kz7"] Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.013210 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-55kz7"] Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.019331 4816 scope.go:117] "RemoveContainer" containerID="a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.035729 4816 scope.go:117] "RemoveContainer" containerID="54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d" Feb 16 14:27:32 crc kubenswrapper[4816]: E0216 14:27:32.036419 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d\": container with ID starting with 54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d not found: ID does not exist" containerID="54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.036459 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d"} err="failed to get container status \"54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d\": rpc error: code = NotFound desc = could not find container \"54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d\": container with ID starting with 54bf3669b4a43e4ea8ffbb5a867a4d5b0002d9bb6f2b51cb3a42637054bc3c5d not found: ID does not exist" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.036485 4816 scope.go:117] "RemoveContainer" containerID="422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af" Feb 16 14:27:32 crc kubenswrapper[4816]: E0216 14:27:32.036813 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af\": container with ID starting with 422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af not found: ID does not exist" containerID="422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.036846 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af"} err="failed to get container status \"422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af\": rpc error: code = NotFound desc = could not find container \"422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af\": container with ID starting with 422d87deccbff255fe2d4ebef29830e4bd444f841a6a47f5c0cf36c2a81a54af not found: ID does not exist" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.036870 4816 scope.go:117] "RemoveContainer" 
containerID="a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197" Feb 16 14:27:32 crc kubenswrapper[4816]: E0216 14:27:32.037063 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197\": container with ID starting with a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197 not found: ID does not exist" containerID="a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.037087 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197"} err="failed to get container status \"a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197\": rpc error: code = NotFound desc = could not find container \"a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197\": container with ID starting with a1d3433ab85705a6a7f7f913861fb34706e3c969d7291b3fe4be57f62b9ce197 not found: ID does not exist" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.086400 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.185774 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9f466ffc7-55pmg"] Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.314787 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"] Feb 16 14:27:32 crc kubenswrapper[4816]: W0216 14:27:32.321556 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdda3ff5_4e68_41bf_a02d_6c5c3d5f854c.slice/crio-653d3deb1cbaac3d77468542ce8c854f5ded32826791e337f82285ae52453e11 WatchSource:0}: Error finding container 653d3deb1cbaac3d77468542ce8c854f5ded32826791e337f82285ae52453e11: Status 404 returned error can't find the container with id 653d3deb1cbaac3d77468542ce8c854f5ded32826791e337f82285ae52453e11 Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.981379 4816 generic.go:334] "Generic (PLEG): container finished" podID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerID="649ec60e3054851ed900d5beb7e19cacf9b9200f6b5571dcf084f0b5f6bfae88" exitCode=0 Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.981435 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" event={"ID":"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c","Type":"ContainerDied","Data":"649ec60e3054851ed900d5beb7e19cacf9b9200f6b5571dcf084f0b5f6bfae88"} Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.981792 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" event={"ID":"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c","Type":"ContainerStarted","Data":"653d3deb1cbaac3d77468542ce8c854f5ded32826791e337f82285ae52453e11"} Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.983950 4816 generic.go:334] "Generic (PLEG): container finished" podID="891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" containerID="930fa2f380b030e80d0efca060f7a10dcc00699aab20f8a99bf18a87c3226e85" exitCode=0 Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.984026 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" 
event={"ID":"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168","Type":"ContainerDied","Data":"930fa2f380b030e80d0efca060f7a10dcc00699aab20f8a99bf18a87c3226e85"} Feb 16 14:27:32 crc kubenswrapper[4816]: I0216 14:27:32.984053 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" event={"ID":"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168","Type":"ContainerStarted","Data":"6532d1d75d0970d4f8d0bd9d9c0da0556df4027747c7b274e89c23d018900d37"} Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.289606 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.344342 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-dns-svc\") pod \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.344406 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-config\") pod \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.344487 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-ovsdbserver-nb\") pod \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.344557 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsl45\" (UniqueName: \"kubernetes.io/projected/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-kube-api-access-nsl45\") pod \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\" (UID: \"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168\") " Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.349610 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-kube-api-access-nsl45" (OuterVolumeSpecName: "kube-api-access-nsl45") pod "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" (UID: "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168"). InnerVolumeSpecName "kube-api-access-nsl45". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.362826 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" (UID: "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.365046 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-config" (OuterVolumeSpecName: "config") pod "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" (UID: "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.370725 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" (UID: "891beb3f-86a1-4a8f-9a7e-5b6fdb80f168"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.409926 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd54e1b2-d81c-4602-a8b5-3dbe898093ed" path="/var/lib/kubelet/pods/bd54e1b2-d81c-4602-a8b5-3dbe898093ed/volumes" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.446787 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.446818 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsl45\" (UniqueName: \"kubernetes.io/projected/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-kube-api-access-nsl45\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.446830 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.446838 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.998223 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" event={"ID":"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c","Type":"ContainerStarted","Data":"53c283b729b18f93462be9d74d6b4192007f3bd899ed5641ed30b51676c830ae"} Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.999147 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.999922 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" event={"ID":"891beb3f-86a1-4a8f-9a7e-5b6fdb80f168","Type":"ContainerDied","Data":"6532d1d75d0970d4f8d0bd9d9c0da0556df4027747c7b274e89c23d018900d37"} Feb 16 14:27:33 crc kubenswrapper[4816]: I0216 14:27:33.999975 4816 scope.go:117] "RemoveContainer" containerID="930fa2f380b030e80d0efca060f7a10dcc00699aab20f8a99bf18a87c3226e85" Feb 16 14:27:34 crc kubenswrapper[4816]: I0216 14:27:34.000190 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9f466ffc7-55pmg" Feb 16 14:27:34 crc kubenswrapper[4816]: I0216 14:27:34.016770 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" podStartSLOduration=3.016732657 podStartE2EDuration="3.016732657s" podCreationTimestamp="2026-02-16 14:27:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:34.015138733 +0000 UTC m=+5053.341852461" watchObservedRunningTime="2026-02-16 14:27:34.016732657 +0000 UTC m=+5053.343446385" Feb 16 14:27:34 crc kubenswrapper[4816]: I0216 14:27:34.061796 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9f466ffc7-55pmg"] Feb 16 14:27:34 crc kubenswrapper[4816]: I0216 14:27:34.070189 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9f466ffc7-55pmg"] Feb 16 14:27:35 crc kubenswrapper[4816]: I0216 14:27:35.335534 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Feb 16 14:27:35 crc kubenswrapper[4816]: I0216 14:27:35.415731 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" path="/var/lib/kubelet/pods/891beb3f-86a1-4a8f-9a7e-5b6fdb80f168/volumes" Feb 16 14:27:36 crc kubenswrapper[4816]: I0216 14:27:36.941188 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:27:36 crc kubenswrapper[4816]: I0216 14:27:36.941304 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.024224 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Feb 16 14:27:38 crc kubenswrapper[4816]: E0216 14:27:38.024726 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" containerName="init" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.024749 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" containerName="init" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.025016 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="891beb3f-86a1-4a8f-9a7e-5b6fdb80f168" containerName="init" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.026087 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.028767 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.045242 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.128456 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xzln\" (UniqueName: \"kubernetes.io/projected/f0778ea5-8a1a-4366-ab14-7ef29ec67351-kube-api-access-5xzln\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.129089 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/f0778ea5-8a1a-4366-ab14-7ef29ec67351-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.129190 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c21d94f4-d86c-4248-a0b6-151f44818268\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.230806 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/f0778ea5-8a1a-4366-ab14-7ef29ec67351-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.230960 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c21d94f4-d86c-4248-a0b6-151f44818268\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.231058 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xzln\" (UniqueName: \"kubernetes.io/projected/f0778ea5-8a1a-4366-ab14-7ef29ec67351-kube-api-access-5xzln\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.234583 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.234642 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c21d94f4-d86c-4248-a0b6-151f44818268\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/731b0de9ecd4409e43f64be30834a4c47f3016cf2126034cf71ab2a4d857d32c/globalmount\"" pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.239549 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/f0778ea5-8a1a-4366-ab14-7ef29ec67351-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.257857 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xzln\" (UniqueName: \"kubernetes.io/projected/f0778ea5-8a1a-4366-ab14-7ef29ec67351-kube-api-access-5xzln\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.296223 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c21d94f4-d86c-4248-a0b6-151f44818268\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268\") pod \"ovn-copy-data\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.365576 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Feb 16 14:27:38 crc kubenswrapper[4816]: I0216 14:27:38.976919 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Feb 16 14:27:39 crc kubenswrapper[4816]: I0216 14:27:39.054352 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"f0778ea5-8a1a-4366-ab14-7ef29ec67351","Type":"ContainerStarted","Data":"f6cb4c25c3f778a44694437ad141e3ddd4bcffff745c856b4899c929ccec1e41"} Feb 16 14:27:40 crc kubenswrapper[4816]: I0216 14:27:40.066042 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"f0778ea5-8a1a-4366-ab14-7ef29ec67351","Type":"ContainerStarted","Data":"8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8"} Feb 16 14:27:40 crc kubenswrapper[4816]: I0216 14:27:40.093026 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=4.093003032 podStartE2EDuration="4.093003032s" podCreationTimestamp="2026-02-16 14:27:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:40.08630317 +0000 UTC m=+5059.413016918" watchObservedRunningTime="2026-02-16 14:27:40.093003032 +0000 UTC m=+5059.419716770" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.088944 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.155723 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-vd6pc"] Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.156004 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" podUID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerName="dnsmasq-dns" containerID="cri-o://8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51" gracePeriod=10 Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.591352 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.617323 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pknkw\" (UniqueName: \"kubernetes.io/projected/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-kube-api-access-pknkw\") pod \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.617412 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-dns-svc\") pod \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.617452 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-config\") pod \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\" (UID: \"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c\") " Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.625016 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-kube-api-access-pknkw" (OuterVolumeSpecName: "kube-api-access-pknkw") pod "11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" (UID: "11dedb3d-4288-4a1d-b6e9-9f7066b9d00c"). InnerVolumeSpecName "kube-api-access-pknkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.653830 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-config" (OuterVolumeSpecName: "config") pod "11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" (UID: "11dedb3d-4288-4a1d-b6e9-9f7066b9d00c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.655901 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" (UID: "11dedb3d-4288-4a1d-b6e9-9f7066b9d00c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.719527 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pknkw\" (UniqueName: \"kubernetes.io/projected/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-kube-api-access-pknkw\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.719599 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:42 crc kubenswrapper[4816]: I0216 14:27:42.719627 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.100954 4816 generic.go:334] "Generic (PLEG): container finished" podID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerID="8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51" exitCode=0 Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.101004 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" event={"ID":"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c","Type":"ContainerDied","Data":"8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51"} Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.101009 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.101037 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-vd6pc" event={"ID":"11dedb3d-4288-4a1d-b6e9-9f7066b9d00c","Type":"ContainerDied","Data":"7fb0f1a9711fea0010f3617e78e7dfbda7f4468c16d0b3a7cb6d4848fcf8fea3"} Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.101057 4816 scope.go:117] "RemoveContainer" containerID="8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.123910 4816 scope.go:117] "RemoveContainer" containerID="32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.141856 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-vd6pc"] Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.148684 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-vd6pc"] Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.157992 4816 scope.go:117] "RemoveContainer" containerID="8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51" Feb 16 14:27:43 crc kubenswrapper[4816]: E0216 14:27:43.158879 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51\": container with ID starting with 8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51 not found: ID does not exist" containerID="8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.158938 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51"} err="failed to get container status 
\"8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51\": rpc error: code = NotFound desc = could not find container \"8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51\": container with ID starting with 8f2b61d96b9ef3f0cbe161c0a1d0572de912d4479047d624b0868e6385016f51 not found: ID does not exist" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.158967 4816 scope.go:117] "RemoveContainer" containerID="32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487" Feb 16 14:27:43 crc kubenswrapper[4816]: E0216 14:27:43.159318 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487\": container with ID starting with 32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487 not found: ID does not exist" containerID="32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.159367 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487"} err="failed to get container status \"32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487\": rpc error: code = NotFound desc = could not find container \"32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487\": container with ID starting with 32da05009ec03a5a58fc8572f53863e0b802e95e6dba7e32775fc17d6827c487 not found: ID does not exist" Feb 16 14:27:43 crc kubenswrapper[4816]: I0216 14:27:43.408342 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" path="/var/lib/kubelet/pods/11dedb3d-4288-4a1d-b6e9-9f7066b9d00c/volumes" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:44.998301 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Feb 16 14:27:45 crc kubenswrapper[4816]: E0216 14:27:45.006492 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerName="dnsmasq-dns" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.006515 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerName="dnsmasq-dns" Feb 16 14:27:45 crc kubenswrapper[4816]: E0216 14:27:45.006556 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerName="init" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.006564 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerName="init" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.006779 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="11dedb3d-4288-4a1d-b6e9-9f7066b9d00c" containerName="dnsmasq-dns" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.007780 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.008563 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.013992 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.014406 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-4qvdc" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.014677 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.156003 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vj9d\" (UniqueName: \"kubernetes.io/projected/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-kube-api-access-5vj9d\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.156053 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-scripts\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.156089 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-config\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.156117 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.156165 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.257904 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vj9d\" (UniqueName: \"kubernetes.io/projected/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-kube-api-access-5vj9d\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.257954 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-scripts\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.257981 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-config\") pod \"ovn-northd-0\" (UID: 
\"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.258007 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.258037 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.259548 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-config\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.259557 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-scripts\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.260198 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.269102 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.289428 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vj9d\" (UniqueName: \"kubernetes.io/projected/38d8bf2c-034a-4755-a90e-8bb4c0d7d55c-kube-api-access-5vj9d\") pod \"ovn-northd-0\" (UID: \"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c\") " pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.329109 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Feb 16 14:27:45 crc kubenswrapper[4816]: I0216 14:27:45.813229 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 16 14:27:45 crc kubenswrapper[4816]: W0216 14:27:45.818135 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38d8bf2c_034a_4755_a90e_8bb4c0d7d55c.slice/crio-17655831f331451593bec90bc3d5ed63219742de363df194bb33573fd8df9365 WatchSource:0}: Error finding container 17655831f331451593bec90bc3d5ed63219742de363df194bb33573fd8df9365: Status 404 returned error can't find the container with id 17655831f331451593bec90bc3d5ed63219742de363df194bb33573fd8df9365 Feb 16 14:27:46 crc kubenswrapper[4816]: I0216 14:27:46.124353 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c","Type":"ContainerStarted","Data":"fa547f4ed093300f349eac72027e9208cb0082710e3f8aaaaab1a9009119709d"} Feb 16 14:27:46 crc kubenswrapper[4816]: I0216 14:27:46.124938 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 16 14:27:46 crc kubenswrapper[4816]: I0216 14:27:46.124952 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c","Type":"ContainerStarted","Data":"efcd7829e0c4fd3d76719fa31201ab5ceed1d0d93a8328a4fee5e1eadbc7bec3"} Feb 16 14:27:46 crc kubenswrapper[4816]: I0216 14:27:46.124963 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"38d8bf2c-034a-4755-a90e-8bb4c0d7d55c","Type":"ContainerStarted","Data":"17655831f331451593bec90bc3d5ed63219742de363df194bb33573fd8df9365"} Feb 16 14:27:46 crc kubenswrapper[4816]: I0216 14:27:46.148969 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.148945173 podStartE2EDuration="2.148945173s" podCreationTimestamp="2026-02-16 14:27:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:46.13970445 +0000 UTC m=+5065.466418178" watchObservedRunningTime="2026-02-16 14:27:46.148945173 +0000 UTC m=+5065.475658901" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.167811 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-llwbm"] Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.169455 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.182074 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5ece-account-create-update-z78b8"] Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.183192 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.184681 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.194341 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-llwbm"] Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.222387 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5ece-account-create-update-z78b8"] Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.340245 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe106007-900d-482f-8029-155ca0185242-operator-scripts\") pod \"keystone-db-create-llwbm\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.340377 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dml98\" (UniqueName: \"kubernetes.io/projected/da8a148e-702c-4553-a7b5-29268d0d3645-kube-api-access-dml98\") pod \"keystone-5ece-account-create-update-z78b8\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.340409 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da8a148e-702c-4553-a7b5-29268d0d3645-operator-scripts\") pod \"keystone-5ece-account-create-update-z78b8\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.340433 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l97dx\" (UniqueName: \"kubernetes.io/projected/fe106007-900d-482f-8029-155ca0185242-kube-api-access-l97dx\") pod \"keystone-db-create-llwbm\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.441912 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dml98\" (UniqueName: \"kubernetes.io/projected/da8a148e-702c-4553-a7b5-29268d0d3645-kube-api-access-dml98\") pod \"keystone-5ece-account-create-update-z78b8\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.441977 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da8a148e-702c-4553-a7b5-29268d0d3645-operator-scripts\") pod \"keystone-5ece-account-create-update-z78b8\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.442013 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l97dx\" (UniqueName: \"kubernetes.io/projected/fe106007-900d-482f-8029-155ca0185242-kube-api-access-l97dx\") pod \"keystone-db-create-llwbm\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc 
kubenswrapper[4816]: I0216 14:27:50.442040 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe106007-900d-482f-8029-155ca0185242-operator-scripts\") pod \"keystone-db-create-llwbm\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.442955 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da8a148e-702c-4553-a7b5-29268d0d3645-operator-scripts\") pod \"keystone-5ece-account-create-update-z78b8\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.442959 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe106007-900d-482f-8029-155ca0185242-operator-scripts\") pod \"keystone-db-create-llwbm\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.460556 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l97dx\" (UniqueName: \"kubernetes.io/projected/fe106007-900d-482f-8029-155ca0185242-kube-api-access-l97dx\") pod \"keystone-db-create-llwbm\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.467285 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dml98\" (UniqueName: \"kubernetes.io/projected/da8a148e-702c-4553-a7b5-29268d0d3645-kube-api-access-dml98\") pod \"keystone-5ece-account-create-update-z78b8\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.503011 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.525566 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:50 crc kubenswrapper[4816]: I0216 14:27:50.959437 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-llwbm"] Feb 16 14:27:51 crc kubenswrapper[4816]: I0216 14:27:51.003997 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5ece-account-create-update-z78b8"] Feb 16 14:27:51 crc kubenswrapper[4816]: W0216 14:27:51.014894 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda8a148e_702c_4553_a7b5_29268d0d3645.slice/crio-007262cebd454cfda4cdf3051e304379d93e95b22a855012654de4926f8b3ee4 WatchSource:0}: Error finding container 007262cebd454cfda4cdf3051e304379d93e95b22a855012654de4926f8b3ee4: Status 404 returned error can't find the container with id 007262cebd454cfda4cdf3051e304379d93e95b22a855012654de4926f8b3ee4 Feb 16 14:27:51 crc kubenswrapper[4816]: I0216 14:27:51.167822 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ece-account-create-update-z78b8" event={"ID":"da8a148e-702c-4553-a7b5-29268d0d3645","Type":"ContainerStarted","Data":"24ce500bee6ff0ad40be30416fc9e59716e8847db51a760725983c4ac75ab433"} Feb 16 14:27:51 crc kubenswrapper[4816]: I0216 14:27:51.168155 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ece-account-create-update-z78b8" event={"ID":"da8a148e-702c-4553-a7b5-29268d0d3645","Type":"ContainerStarted","Data":"007262cebd454cfda4cdf3051e304379d93e95b22a855012654de4926f8b3ee4"} Feb 16 14:27:51 crc kubenswrapper[4816]: I0216 14:27:51.171556 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-llwbm" event={"ID":"fe106007-900d-482f-8029-155ca0185242","Type":"ContainerStarted","Data":"18660c3f9adf0ee465da93e3bdabea8404f0096997994bc6f39d26d029f0b5fa"} Feb 16 14:27:51 crc kubenswrapper[4816]: I0216 14:27:51.171616 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-llwbm" event={"ID":"fe106007-900d-482f-8029-155ca0185242","Type":"ContainerStarted","Data":"c965d994e49dce62b8a9a7f0f1fd4070b36363930aca203959869c59513bc254"} Feb 16 14:27:51 crc kubenswrapper[4816]: I0216 14:27:51.183278 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5ece-account-create-update-z78b8" podStartSLOduration=1.183256898 podStartE2EDuration="1.183256898s" podCreationTimestamp="2026-02-16 14:27:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:51.180275896 +0000 UTC m=+5070.506989614" watchObservedRunningTime="2026-02-16 14:27:51.183256898 +0000 UTC m=+5070.509970626" Feb 16 14:27:51 crc kubenswrapper[4816]: I0216 14:27:51.201048 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-llwbm" podStartSLOduration=1.201028762 podStartE2EDuration="1.201028762s" podCreationTimestamp="2026-02-16 14:27:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:51.193993411 +0000 UTC m=+5070.520707139" watchObservedRunningTime="2026-02-16 14:27:51.201028762 +0000 UTC m=+5070.527742480" Feb 16 14:27:52 crc kubenswrapper[4816]: I0216 14:27:52.181315 4816 generic.go:334] "Generic (PLEG): container finished" podID="fe106007-900d-482f-8029-155ca0185242" 
containerID="18660c3f9adf0ee465da93e3bdabea8404f0096997994bc6f39d26d029f0b5fa" exitCode=0 Feb 16 14:27:52 crc kubenswrapper[4816]: I0216 14:27:52.181418 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-llwbm" event={"ID":"fe106007-900d-482f-8029-155ca0185242","Type":"ContainerDied","Data":"18660c3f9adf0ee465da93e3bdabea8404f0096997994bc6f39d26d029f0b5fa"} Feb 16 14:27:52 crc kubenswrapper[4816]: I0216 14:27:52.184613 4816 generic.go:334] "Generic (PLEG): container finished" podID="da8a148e-702c-4553-a7b5-29268d0d3645" containerID="24ce500bee6ff0ad40be30416fc9e59716e8847db51a760725983c4ac75ab433" exitCode=0 Feb 16 14:27:52 crc kubenswrapper[4816]: I0216 14:27:52.184703 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ece-account-create-update-z78b8" event={"ID":"da8a148e-702c-4553-a7b5-29268d0d3645","Type":"ContainerDied","Data":"24ce500bee6ff0ad40be30416fc9e59716e8847db51a760725983c4ac75ab433"} Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.655483 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.662983 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.804202 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe106007-900d-482f-8029-155ca0185242-operator-scripts\") pod \"fe106007-900d-482f-8029-155ca0185242\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.804360 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l97dx\" (UniqueName: \"kubernetes.io/projected/fe106007-900d-482f-8029-155ca0185242-kube-api-access-l97dx\") pod \"fe106007-900d-482f-8029-155ca0185242\" (UID: \"fe106007-900d-482f-8029-155ca0185242\") " Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.804878 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe106007-900d-482f-8029-155ca0185242-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fe106007-900d-482f-8029-155ca0185242" (UID: "fe106007-900d-482f-8029-155ca0185242"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.804957 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dml98\" (UniqueName: \"kubernetes.io/projected/da8a148e-702c-4553-a7b5-29268d0d3645-kube-api-access-dml98\") pod \"da8a148e-702c-4553-a7b5-29268d0d3645\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.805039 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da8a148e-702c-4553-a7b5-29268d0d3645-operator-scripts\") pod \"da8a148e-702c-4553-a7b5-29268d0d3645\" (UID: \"da8a148e-702c-4553-a7b5-29268d0d3645\") " Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.805551 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe106007-900d-482f-8029-155ca0185242-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.805582 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da8a148e-702c-4553-a7b5-29268d0d3645-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "da8a148e-702c-4553-a7b5-29268d0d3645" (UID: "da8a148e-702c-4553-a7b5-29268d0d3645"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.811907 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da8a148e-702c-4553-a7b5-29268d0d3645-kube-api-access-dml98" (OuterVolumeSpecName: "kube-api-access-dml98") pod "da8a148e-702c-4553-a7b5-29268d0d3645" (UID: "da8a148e-702c-4553-a7b5-29268d0d3645"). InnerVolumeSpecName "kube-api-access-dml98". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.811963 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe106007-900d-482f-8029-155ca0185242-kube-api-access-l97dx" (OuterVolumeSpecName: "kube-api-access-l97dx") pod "fe106007-900d-482f-8029-155ca0185242" (UID: "fe106007-900d-482f-8029-155ca0185242"). InnerVolumeSpecName "kube-api-access-l97dx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.907427 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l97dx\" (UniqueName: \"kubernetes.io/projected/fe106007-900d-482f-8029-155ca0185242-kube-api-access-l97dx\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.907504 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dml98\" (UniqueName: \"kubernetes.io/projected/da8a148e-702c-4553-a7b5-29268d0d3645-kube-api-access-dml98\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:53 crc kubenswrapper[4816]: I0216 14:27:53.907537 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da8a148e-702c-4553-a7b5-29268d0d3645-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:27:54 crc kubenswrapper[4816]: I0216 14:27:54.202823 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-llwbm" event={"ID":"fe106007-900d-482f-8029-155ca0185242","Type":"ContainerDied","Data":"c965d994e49dce62b8a9a7f0f1fd4070b36363930aca203959869c59513bc254"} Feb 16 14:27:54 crc kubenswrapper[4816]: I0216 14:27:54.203130 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c965d994e49dce62b8a9a7f0f1fd4070b36363930aca203959869c59513bc254" Feb 16 14:27:54 crc kubenswrapper[4816]: I0216 14:27:54.202974 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-llwbm" Feb 16 14:27:54 crc kubenswrapper[4816]: I0216 14:27:54.205005 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ece-account-create-update-z78b8" event={"ID":"da8a148e-702c-4553-a7b5-29268d0d3645","Type":"ContainerDied","Data":"007262cebd454cfda4cdf3051e304379d93e95b22a855012654de4926f8b3ee4"} Feb 16 14:27:54 crc kubenswrapper[4816]: I0216 14:27:54.205321 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="007262cebd454cfda4cdf3051e304379d93e95b22a855012654de4926f8b3ee4" Feb 16 14:27:54 crc kubenswrapper[4816]: I0216 14:27:54.205188 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5ece-account-create-update-z78b8" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.415050 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.751282 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-7cfgx"] Feb 16 14:27:55 crc kubenswrapper[4816]: E0216 14:27:55.751710 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da8a148e-702c-4553-a7b5-29268d0d3645" containerName="mariadb-account-create-update" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.751731 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="da8a148e-702c-4553-a7b5-29268d0d3645" containerName="mariadb-account-create-update" Feb 16 14:27:55 crc kubenswrapper[4816]: E0216 14:27:55.751756 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe106007-900d-482f-8029-155ca0185242" containerName="mariadb-database-create" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.751765 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe106007-900d-482f-8029-155ca0185242" containerName="mariadb-database-create" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.751966 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe106007-900d-482f-8029-155ca0185242" containerName="mariadb-database-create" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.751983 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="da8a148e-702c-4553-a7b5-29268d0d3645" containerName="mariadb-account-create-update" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.752641 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.757489 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.761077 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kvg9f" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.761229 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.761511 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.770337 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-7cfgx"] Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.836457 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlmxs\" (UniqueName: \"kubernetes.io/projected/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-kube-api-access-mlmxs\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.836841 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-combined-ca-bundle\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.836906 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-config-data\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.938975 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-combined-ca-bundle\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.939021 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-config-data\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.939120 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlmxs\" (UniqueName: \"kubernetes.io/projected/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-kube-api-access-mlmxs\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.944221 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-combined-ca-bundle\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " 
pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.945087 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-config-data\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:55 crc kubenswrapper[4816]: I0216 14:27:55.957932 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlmxs\" (UniqueName: \"kubernetes.io/projected/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-kube-api-access-mlmxs\") pod \"keystone-db-sync-7cfgx\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:56 crc kubenswrapper[4816]: I0216 14:27:56.071557 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:27:56 crc kubenswrapper[4816]: I0216 14:27:56.518514 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-7cfgx"] Feb 16 14:27:57 crc kubenswrapper[4816]: I0216 14:27:57.233573 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7cfgx" event={"ID":"1b29b2b2-90b0-40c4-bcac-6b94ee2af701","Type":"ContainerStarted","Data":"439a80fcf9d208a6d9421bfdb52c81440d46dc5b6f630aae87abe43caaa71e83"} Feb 16 14:27:57 crc kubenswrapper[4816]: I0216 14:27:57.233979 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7cfgx" event={"ID":"1b29b2b2-90b0-40c4-bcac-6b94ee2af701","Type":"ContainerStarted","Data":"7147c939089be7e8d6b0947cfe32689443da49a7620c6999722098e855aabe34"} Feb 16 14:27:57 crc kubenswrapper[4816]: I0216 14:27:57.254429 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-7cfgx" podStartSLOduration=2.254405213 podStartE2EDuration="2.254405213s" podCreationTimestamp="2026-02-16 14:27:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:27:57.249004255 +0000 UTC m=+5076.575717983" watchObservedRunningTime="2026-02-16 14:27:57.254405213 +0000 UTC m=+5076.581118941" Feb 16 14:27:59 crc kubenswrapper[4816]: I0216 14:27:59.251973 4816 generic.go:334] "Generic (PLEG): container finished" podID="1b29b2b2-90b0-40c4-bcac-6b94ee2af701" containerID="439a80fcf9d208a6d9421bfdb52c81440d46dc5b6f630aae87abe43caaa71e83" exitCode=0 Feb 16 14:27:59 crc kubenswrapper[4816]: I0216 14:27:59.252299 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7cfgx" event={"ID":"1b29b2b2-90b0-40c4-bcac-6b94ee2af701","Type":"ContainerDied","Data":"439a80fcf9d208a6d9421bfdb52c81440d46dc5b6f630aae87abe43caaa71e83"} Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.615095 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.720402 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-config-data\") pod \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.720587 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlmxs\" (UniqueName: \"kubernetes.io/projected/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-kube-api-access-mlmxs\") pod \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.720703 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-combined-ca-bundle\") pod \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\" (UID: \"1b29b2b2-90b0-40c4-bcac-6b94ee2af701\") " Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.727286 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-kube-api-access-mlmxs" (OuterVolumeSpecName: "kube-api-access-mlmxs") pod "1b29b2b2-90b0-40c4-bcac-6b94ee2af701" (UID: "1b29b2b2-90b0-40c4-bcac-6b94ee2af701"). InnerVolumeSpecName "kube-api-access-mlmxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.746735 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b29b2b2-90b0-40c4-bcac-6b94ee2af701" (UID: "1b29b2b2-90b0-40c4-bcac-6b94ee2af701"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.762615 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-config-data" (OuterVolumeSpecName: "config-data") pod "1b29b2b2-90b0-40c4-bcac-6b94ee2af701" (UID: "1b29b2b2-90b0-40c4-bcac-6b94ee2af701"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.822829 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.822862 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlmxs\" (UniqueName: \"kubernetes.io/projected/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-kube-api-access-mlmxs\") on node \"crc\" DevicePath \"\"" Feb 16 14:28:00 crc kubenswrapper[4816]: I0216 14:28:00.822873 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b29b2b2-90b0-40c4-bcac-6b94ee2af701-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.274199 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7cfgx" event={"ID":"1b29b2b2-90b0-40c4-bcac-6b94ee2af701","Type":"ContainerDied","Data":"7147c939089be7e8d6b0947cfe32689443da49a7620c6999722098e855aabe34"} Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.274240 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7147c939089be7e8d6b0947cfe32689443da49a7620c6999722098e855aabe34" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.274343 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-7cfgx" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.551474 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-97sl2"] Feb 16 14:28:01 crc kubenswrapper[4816]: E0216 14:28:01.552514 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b29b2b2-90b0-40c4-bcac-6b94ee2af701" containerName="keystone-db-sync" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.552537 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b29b2b2-90b0-40c4-bcac-6b94ee2af701" containerName="keystone-db-sync" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.552790 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b29b2b2-90b0-40c4-bcac-6b94ee2af701" containerName="keystone-db-sync" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.553456 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.555970 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.556213 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.556473 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.556719 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kvg9f" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.565619 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.572329 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-97sl2"] Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.586824 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dc95c5589-66ksr"] Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.588597 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.609085 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dc95c5589-66ksr"] Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.660364 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-config-data\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.660426 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6kbj\" (UniqueName: \"kubernetes.io/projected/8155946a-88e1-4acf-8b72-bb2e47809e76-kube-api-access-s6kbj\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.660489 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-credential-keys\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.660524 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-scripts\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.660597 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-combined-ca-bundle\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 
crc kubenswrapper[4816]: I0216 14:28:01.660625 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-fernet-keys\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.762456 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-sb\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.762540 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-config-data\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.762566 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-config\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.762586 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-nb\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.762606 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6kbj\" (UniqueName: \"kubernetes.io/projected/8155946a-88e1-4acf-8b72-bb2e47809e76-kube-api-access-s6kbj\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.762626 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z9gd\" (UniqueName: \"kubernetes.io/projected/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-kube-api-access-6z9gd\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.763010 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-credential-keys\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.763168 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-scripts\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2" Feb 16 14:28:01 crc 
kubenswrapper[4816]: I0216 14:28:01.763295 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-combined-ca-bundle\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.763323 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-fernet-keys\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.763411 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-dns-svc\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.766414 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-config-data\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.766498 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-credential-keys\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.767235 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-fernet-keys\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.767332 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-scripts\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.773421 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-combined-ca-bundle\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.781510 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6kbj\" (UniqueName: \"kubernetes.io/projected/8155946a-88e1-4acf-8b72-bb2e47809e76-kube-api-access-s6kbj\") pod \"keystone-bootstrap-97sl2\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") " pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.864454 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-dns-svc\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.864525 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-sb\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.864593 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-config\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.864620 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-nb\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.864647 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z9gd\" (UniqueName: \"kubernetes.io/projected/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-kube-api-access-6z9gd\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.865535 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-dns-svc\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.865610 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-sb\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.865720 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-config\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.865734 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-nb\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.879993 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.882161 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z9gd\" (UniqueName: \"kubernetes.io/projected/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-kube-api-access-6z9gd\") pod \"dnsmasq-dns-5dc95c5589-66ksr\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:01 crc kubenswrapper[4816]: I0216 14:28:01.915773 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:02 crc kubenswrapper[4816]: W0216 14:28:02.338152 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8155946a_88e1_4acf_8b72_bb2e47809e76.slice/crio-87653726ddb3e30665f4098f53a4bd55ce7ddb632989a822176a48abdd88ebdf WatchSource:0}: Error finding container 87653726ddb3e30665f4098f53a4bd55ce7ddb632989a822176a48abdd88ebdf: Status 404 returned error can't find the container with id 87653726ddb3e30665f4098f53a4bd55ce7ddb632989a822176a48abdd88ebdf
Feb 16 14:28:02 crc kubenswrapper[4816]: I0216 14:28:02.345440 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-97sl2"]
Feb 16 14:28:02 crc kubenswrapper[4816]: I0216 14:28:02.456341 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dc95c5589-66ksr"]
Feb 16 14:28:03 crc kubenswrapper[4816]: I0216 14:28:03.290776 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97sl2" event={"ID":"8155946a-88e1-4acf-8b72-bb2e47809e76","Type":"ContainerStarted","Data":"cf994ff080aa17b308e0aa59c8a7d65bfad202f529a1114a759d79ae58b2bf4e"}
Feb 16 14:28:03 crc kubenswrapper[4816]: I0216 14:28:03.292452 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97sl2" event={"ID":"8155946a-88e1-4acf-8b72-bb2e47809e76","Type":"ContainerStarted","Data":"87653726ddb3e30665f4098f53a4bd55ce7ddb632989a822176a48abdd88ebdf"}
Feb 16 14:28:03 crc kubenswrapper[4816]: I0216 14:28:03.292774 4816 generic.go:334] "Generic (PLEG): container finished" podID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerID="73703f7edc6485d63cfa77a419ba84cba8124dbafaa61f4082e3d6891d650af4" exitCode=0
Feb 16 14:28:03 crc kubenswrapper[4816]: I0216 14:28:03.292836 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" event={"ID":"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e","Type":"ContainerDied","Data":"73703f7edc6485d63cfa77a419ba84cba8124dbafaa61f4082e3d6891d650af4"}
Feb 16 14:28:03 crc kubenswrapper[4816]: I0216 14:28:03.292870 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" event={"ID":"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e","Type":"ContainerStarted","Data":"04ab91e432efd082e9b633525480c3101b775d3bd01e9812a70ad0dfde3f19bc"}
Feb 16 14:28:03 crc kubenswrapper[4816]: I0216 14:28:03.318212 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-97sl2" podStartSLOduration=2.318182237 podStartE2EDuration="2.318182237s" podCreationTimestamp="2026-02-16 14:28:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:28:03.309809328 +0000 UTC m=+5082.636523056" watchObservedRunningTime="2026-02-16 14:28:03.318182237 +0000 UTC m=+5082.644895985"
Feb 16 14:28:04 crc kubenswrapper[4816]: I0216 14:28:04.306605 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" event={"ID":"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e","Type":"ContainerStarted","Data":"4c1221ec7a3253500b8f72bb7db628e1cd63a3035b13340a7b6bedbbd634ec4d"}
Feb 16 14:28:04 crc kubenswrapper[4816]: I0216 14:28:04.307010 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:04 crc kubenswrapper[4816]: I0216 14:28:04.335314 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" podStartSLOduration=3.335292289 podStartE2EDuration="3.335292289s" podCreationTimestamp="2026-02-16 14:28:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:28:04.328584515 +0000 UTC m=+5083.655298253" watchObservedRunningTime="2026-02-16 14:28:04.335292289 +0000 UTC m=+5083.662006017"
Feb 16 14:28:06 crc kubenswrapper[4816]: I0216 14:28:06.332401 4816 generic.go:334] "Generic (PLEG): container finished" podID="8155946a-88e1-4acf-8b72-bb2e47809e76" containerID="cf994ff080aa17b308e0aa59c8a7d65bfad202f529a1114a759d79ae58b2bf4e" exitCode=0
Feb 16 14:28:06 crc kubenswrapper[4816]: I0216 14:28:06.332467 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97sl2" event={"ID":"8155946a-88e1-4acf-8b72-bb2e47809e76","Type":"ContainerDied","Data":"cf994ff080aa17b308e0aa59c8a7d65bfad202f529a1114a759d79ae58b2bf4e"}
Feb 16 14:28:06 crc kubenswrapper[4816]: I0216 14:28:06.941384 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 14:28:06 crc kubenswrapper[4816]: I0216 14:28:06.941929 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 14:28:06 crc kubenswrapper[4816]: I0216 14:28:06.942205 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc"
Feb 16 14:28:06 crc kubenswrapper[4816]: I0216 14:28:06.943610 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"90e2f0f25ce572784388a1d1c59ee443c4cd8305fe9cbe117e83babcb64c00c6"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 16 14:28:06 crc kubenswrapper[4816]: I0216 14:28:06.943967 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://90e2f0f25ce572784388a1d1c59ee443c4cd8305fe9cbe117e83babcb64c00c6" gracePeriod=600
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.342353 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="90e2f0f25ce572784388a1d1c59ee443c4cd8305fe9cbe117e83babcb64c00c6" exitCode=0
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.342440 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"90e2f0f25ce572784388a1d1c59ee443c4cd8305fe9cbe117e83babcb64c00c6"}
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.342691 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"}
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.342712 4816 scope.go:117] "RemoveContainer" containerID="c27aa6eb4b3a9248630c0927f8aef8160e33716ebd4b02d60116c38a77a62690"
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.691677 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.862232 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-combined-ca-bundle\") pod \"8155946a-88e1-4acf-8b72-bb2e47809e76\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") "
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.862375 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-scripts\") pod \"8155946a-88e1-4acf-8b72-bb2e47809e76\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") "
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.862480 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-config-data\") pod \"8155946a-88e1-4acf-8b72-bb2e47809e76\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") "
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.862557 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-fernet-keys\") pod \"8155946a-88e1-4acf-8b72-bb2e47809e76\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") "
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.862595 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6kbj\" (UniqueName: \"kubernetes.io/projected/8155946a-88e1-4acf-8b72-bb2e47809e76-kube-api-access-s6kbj\") pod \"8155946a-88e1-4acf-8b72-bb2e47809e76\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") "
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.862628 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-credential-keys\") pod \"8155946a-88e1-4acf-8b72-bb2e47809e76\" (UID: \"8155946a-88e1-4acf-8b72-bb2e47809e76\") "
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.868030 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8155946a-88e1-4acf-8b72-bb2e47809e76" (UID: "8155946a-88e1-4acf-8b72-bb2e47809e76"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.868537 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-scripts" (OuterVolumeSpecName: "scripts") pod "8155946a-88e1-4acf-8b72-bb2e47809e76" (UID: "8155946a-88e1-4acf-8b72-bb2e47809e76"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.877649 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8155946a-88e1-4acf-8b72-bb2e47809e76-kube-api-access-s6kbj" (OuterVolumeSpecName: "kube-api-access-s6kbj") pod "8155946a-88e1-4acf-8b72-bb2e47809e76" (UID: "8155946a-88e1-4acf-8b72-bb2e47809e76"). InnerVolumeSpecName "kube-api-access-s6kbj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.877873 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8155946a-88e1-4acf-8b72-bb2e47809e76" (UID: "8155946a-88e1-4acf-8b72-bb2e47809e76"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.888593 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8155946a-88e1-4acf-8b72-bb2e47809e76" (UID: "8155946a-88e1-4acf-8b72-bb2e47809e76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.889818 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-config-data" (OuterVolumeSpecName: "config-data") pod "8155946a-88e1-4acf-8b72-bb2e47809e76" (UID: "8155946a-88e1-4acf-8b72-bb2e47809e76"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.964461 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.964496 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.964511 4816 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-fernet-keys\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.964524 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6kbj\" (UniqueName: \"kubernetes.io/projected/8155946a-88e1-4acf-8b72-bb2e47809e76-kube-api-access-s6kbj\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.964537 4816 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-credential-keys\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:07 crc kubenswrapper[4816]: I0216 14:28:07.964552 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8155946a-88e1-4acf-8b72-bb2e47809e76-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.358337 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-97sl2" event={"ID":"8155946a-88e1-4acf-8b72-bb2e47809e76","Type":"ContainerDied","Data":"87653726ddb3e30665f4098f53a4bd55ce7ddb632989a822176a48abdd88ebdf"}
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.358427 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87653726ddb3e30665f4098f53a4bd55ce7ddb632989a822176a48abdd88ebdf"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.358592 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-97sl2"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.426441 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-97sl2"]
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.436104 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-97sl2"]
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.535314 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lq2b6"]
Feb 16 14:28:08 crc kubenswrapper[4816]: E0216 14:28:08.535714 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8155946a-88e1-4acf-8b72-bb2e47809e76" containerName="keystone-bootstrap"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.535728 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8155946a-88e1-4acf-8b72-bb2e47809e76" containerName="keystone-bootstrap"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.535953 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8155946a-88e1-4acf-8b72-bb2e47809e76" containerName="keystone-bootstrap"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.536979 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.539166 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.539254 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kvg9f"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.539176 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.539926 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.541841 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.558992 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lq2b6"]
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.586157 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-combined-ca-bundle\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.586203 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-credential-keys\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.586247 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-fernet-keys\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.586284 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-scripts\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.586356 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh78m\" (UniqueName: \"kubernetes.io/projected/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-kube-api-access-zh78m\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.586381 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-config-data\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.687863 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-combined-ca-bundle\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.687901 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-credential-keys\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.687940 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-fernet-keys\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.687975 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-scripts\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.688028 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh78m\" (UniqueName: \"kubernetes.io/projected/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-kube-api-access-zh78m\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.688049 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-config-data\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.693769 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-credential-keys\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.695175 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-config-data\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.695677 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-fernet-keys\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.695860 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-combined-ca-bundle\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.699256 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-scripts\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.705238 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh78m\" (UniqueName: \"kubernetes.io/projected/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-kube-api-access-zh78m\") pod \"keystone-bootstrap-lq2b6\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") " pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:08 crc kubenswrapper[4816]: I0216 14:28:08.855429 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:09 crc kubenswrapper[4816]: I0216 14:28:09.280491 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lq2b6"]
Feb 16 14:28:09 crc kubenswrapper[4816]: I0216 14:28:09.368140 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lq2b6" event={"ID":"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2","Type":"ContainerStarted","Data":"273848175bc454521c5707284d4df7cbf1a535ae9cc199621000184cc565f10a"}
Feb 16 14:28:09 crc kubenswrapper[4816]: I0216 14:28:09.410702 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8155946a-88e1-4acf-8b72-bb2e47809e76" path="/var/lib/kubelet/pods/8155946a-88e1-4acf-8b72-bb2e47809e76/volumes"
Feb 16 14:28:10 crc kubenswrapper[4816]: I0216 14:28:10.376315 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lq2b6" event={"ID":"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2","Type":"ContainerStarted","Data":"29dd50f0854ebcc63c2e1528c48488fcb5f16d73b9a034d8cac0d75db7e04b24"}
Feb 16 14:28:10 crc kubenswrapper[4816]: I0216 14:28:10.401317 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lq2b6" podStartSLOduration=2.401297794 podStartE2EDuration="2.401297794s" podCreationTimestamp="2026-02-16 14:28:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:28:10.393729577 +0000 UTC m=+5089.720443325" watchObservedRunningTime="2026-02-16 14:28:10.401297794 +0000 UTC m=+5089.728011532"
Feb 16 14:28:11 crc kubenswrapper[4816]: I0216 14:28:11.917617 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr"
Feb 16 14:28:11 crc kubenswrapper[4816]: I0216 14:28:11.984103 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"]
Feb 16 14:28:11 crc kubenswrapper[4816]: I0216 14:28:11.984445 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerName="dnsmasq-dns" containerID="cri-o://53c283b729b18f93462be9d74d6b4192007f3bd899ed5641ed30b51676c830ae" gracePeriod=10
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.087398 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.10:5353: connect: connection refused"
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.398226 4816 generic.go:334] "Generic (PLEG): container finished" podID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerID="53c283b729b18f93462be9d74d6b4192007f3bd899ed5641ed30b51676c830ae" exitCode=0
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.398290 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" event={"ID":"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c","Type":"ContainerDied","Data":"53c283b729b18f93462be9d74d6b4192007f3bd899ed5641ed30b51676c830ae"}
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.398541 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4" event={"ID":"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c","Type":"ContainerDied","Data":"653d3deb1cbaac3d77468542ce8c854f5ded32826791e337f82285ae52453e11"}
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.398554 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="653d3deb1cbaac3d77468542ce8c854f5ded32826791e337f82285ae52453e11"
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.400990 4816 generic.go:334] "Generic (PLEG): container finished" podID="d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" containerID="29dd50f0854ebcc63c2e1528c48488fcb5f16d73b9a034d8cac0d75db7e04b24" exitCode=0
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.401018 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lq2b6" event={"ID":"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2","Type":"ContainerDied","Data":"29dd50f0854ebcc63c2e1528c48488fcb5f16d73b9a034d8cac0d75db7e04b24"}
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.447144 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.550734 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-sb\") pod \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") "
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.550784 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-nb\") pod \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") "
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.550818 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9jsm\" (UniqueName: \"kubernetes.io/projected/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-kube-api-access-m9jsm\") pod \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") "
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.550960 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-dns-svc\") pod \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") "
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.551000 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-config\") pod \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\" (UID: \"fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c\") "
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.556468 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-kube-api-access-m9jsm" (OuterVolumeSpecName: "kube-api-access-m9jsm") pod "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" (UID: "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c"). InnerVolumeSpecName "kube-api-access-m9jsm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.594096 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" (UID: "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.597268 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" (UID: "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.601088 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-config" (OuterVolumeSpecName: "config") pod "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" (UID: "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.604400 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" (UID: "fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.653083 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.653149 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-config\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.653163 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.653174 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:12 crc kubenswrapper[4816]: I0216 14:28:12.653186 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9jsm\" (UniqueName: \"kubernetes.io/projected/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c-kube-api-access-m9jsm\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.410021 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.455052 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"]
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.464800 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c5d6d45cc-7dcf4"]
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.732888 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.872936 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-credential-keys\") pod \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") "
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.872992 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-fernet-keys\") pod \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") "
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.873061 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-config-data\") pod \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") "
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.873093 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh78m\" (UniqueName: \"kubernetes.io/projected/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-kube-api-access-zh78m\") pod \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") "
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.873137 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-combined-ca-bundle\") pod \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") "
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.873162 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-scripts\") pod \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\" (UID: \"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2\") "
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.877877 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-scripts" (OuterVolumeSpecName: "scripts") pod "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" (UID: "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.877922 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-kube-api-access-zh78m" (OuterVolumeSpecName: "kube-api-access-zh78m") pod "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" (UID: "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2"). InnerVolumeSpecName "kube-api-access-zh78m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.878011 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" (UID: "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.878793 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" (UID: "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.896190 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" (UID: "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.904016 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-config-data" (OuterVolumeSpecName: "config-data") pod "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" (UID: "d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.975676 4816 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-credential-keys\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.975719 4816 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-fernet-keys\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.975728 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.975739 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh78m\" (UniqueName: \"kubernetes.io/projected/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-kube-api-access-zh78m\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.975749 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:13 crc kubenswrapper[4816]: I0216 14:28:13.975757 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.418975 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lq2b6" event={"ID":"d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2","Type":"ContainerDied","Data":"273848175bc454521c5707284d4df7cbf1a535ae9cc199621000184cc565f10a"}
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.419324 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="273848175bc454521c5707284d4df7cbf1a535ae9cc199621000184cc565f10a"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.419027 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lq2b6"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.495367 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c89c9568d-8xkdd"]
Feb 16 14:28:14 crc kubenswrapper[4816]: E0216 14:28:14.495922 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerName="dnsmasq-dns"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.495958 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerName="dnsmasq-dns"
Feb 16 14:28:14 crc kubenswrapper[4816]: E0216 14:28:14.496006 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" containerName="keystone-bootstrap"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.496020 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" containerName="keystone-bootstrap"
Feb 16 14:28:14 crc kubenswrapper[4816]: E0216 14:28:14.496057 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerName="init"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.496071 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerName="init"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.496382 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" containerName="dnsmasq-dns"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.496449 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" containerName="keystone-bootstrap"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.497369 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.502524 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.502848 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kvg9f"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.503050 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.503584 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.509896 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c89c9568d-8xkdd"]
Feb 16 14:28:14 crc kubenswrapper[4816]: E0216 14:28:14.579327 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2bcf70e_7911_4f9a_bf71_ae6b8b8e43b2.slice/crio-273848175bc454521c5707284d4df7cbf1a535ae9cc199621000184cc565f10a\": RecentStats: unable to find data in memory cache]"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.687600 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwbhx\" (UniqueName: \"kubernetes.io/projected/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-kube-api-access-dwbhx\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.687670 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-combined-ca-bundle\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.687690 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-config-data\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.687802 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-scripts\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.687825 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-fernet-keys\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.687855 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-credential-keys\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.789999 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwbhx\" (UniqueName: \"kubernetes.io/projected/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-kube-api-access-dwbhx\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.790060 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-combined-ca-bundle\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.790090 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-config-data\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.790148 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-scripts\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.790178 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-fernet-keys\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.790214 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-credential-keys\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.795430 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-combined-ca-bundle\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.795516 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-credential-keys\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.795801 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-config-data\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.796589 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-fernet-keys\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.806397 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-scripts\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.809725 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwbhx\" (UniqueName: \"kubernetes.io/projected/0b6512c2-dc55-4f1d-baf1-d1e901f90a07-kube-api-access-dwbhx\") pod \"keystone-c89c9568d-8xkdd\" (UID: \"0b6512c2-dc55-4f1d-baf1-d1e901f90a07\") " pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:14 crc kubenswrapper[4816]: I0216 14:28:14.824774 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:15 crc kubenswrapper[4816]: I0216 14:28:15.314808 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c89c9568d-8xkdd"]
Feb 16 14:28:15 crc kubenswrapper[4816]: W0216 14:28:15.321822 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6512c2_dc55_4f1d_baf1_d1e901f90a07.slice/crio-b593414925bbf10f6636e95f29b1fb38ad295e0175cdf8f5b35d5ef3b2f88f8d WatchSource:0}: Error finding container b593414925bbf10f6636e95f29b1fb38ad295e0175cdf8f5b35d5ef3b2f88f8d: Status 404 returned error can't find the container with id b593414925bbf10f6636e95f29b1fb38ad295e0175cdf8f5b35d5ef3b2f88f8d
Feb 16 14:28:15 crc kubenswrapper[4816]: I0216 14:28:15.425356 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c" path="/var/lib/kubelet/pods/fdda3ff5-4e68-41bf-a02d-6c5c3d5f854c/volumes"
Feb 16 14:28:15 crc kubenswrapper[4816]: I0216 14:28:15.428008 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c89c9568d-8xkdd" event={"ID":"0b6512c2-dc55-4f1d-baf1-d1e901f90a07","Type":"ContainerStarted","Data":"b593414925bbf10f6636e95f29b1fb38ad295e0175cdf8f5b35d5ef3b2f88f8d"}
Feb 16 14:28:16 crc kubenswrapper[4816]: I0216 14:28:16.442042 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c89c9568d-8xkdd" event={"ID":"0b6512c2-dc55-4f1d-baf1-d1e901f90a07","Type":"ContainerStarted","Data":"c4f81a2879d335fc34b273d563ecc900d955f3e9783ad6cfa90bdfabb9864747"}
Feb 16 14:28:16 crc kubenswrapper[4816]: I0216 14:28:16.443165 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:16 crc kubenswrapper[4816]: I0216 14:28:16.468598 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-c89c9568d-8xkdd" podStartSLOduration=2.468573793 podStartE2EDuration="2.468573793s" podCreationTimestamp="2026-02-16 14:28:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:28:16.461024737 +0000 UTC m=+5095.787738455" watchObservedRunningTime="2026-02-16 14:28:16.468573793 +0000 UTC m=+5095.795287531"
Feb 16 14:28:46 crc kubenswrapper[4816]: I0216 14:28:46.415268 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-c89c9568d-8xkdd"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.056608 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.058283 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.063338 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.069992 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-mgn4x"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.070564 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.071177 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.165636 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config-secret\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.165747 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.166268 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbk8m\" (UniqueName: \"kubernetes.io/projected/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-kube-api-access-bbk8m\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.313823 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config-secret\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.313883 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.313918 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbk8m\" (UniqueName: \"kubernetes.io/projected/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-kube-api-access-bbk8m\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.315048 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.326845 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config-secret\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.334244 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbk8m\" (UniqueName: \"kubernetes.io/projected/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-kube-api-access-bbk8m\") pod \"openstackclient\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.381534 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.839331 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Feb 16 14:28:50 crc kubenswrapper[4816]: I0216 14:28:50.857707 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"28db99cc-bb32-4a18-a2a8-c8daafee6f8b","Type":"ContainerStarted","Data":"15be93bc478609088562baecbad1f5b46e3d9648910c02d39302a12b2eb9a5cb"}
Feb 16 14:28:51 crc kubenswrapper[4816]: I0216 14:28:51.869399 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"28db99cc-bb32-4a18-a2a8-c8daafee6f8b","Type":"ContainerStarted","Data":"23de134a2ee3fcf44c85a5ad41927b45621bb4b0672a9aea24be6b22ca8e14c4"}
Feb 16 14:28:51 crc kubenswrapper[4816]: I0216 14:28:51.893288 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.893214344 podStartE2EDuration="1.893214344s" podCreationTimestamp="2026-02-16 14:28:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:28:51.885463603 +0000 UTC m=+5131.212177341" watchObservedRunningTime="2026-02-16 14:28:51.893214344 +0000 UTC m=+5131.219928112"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.157646 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"]
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.159567 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.163090 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.163390 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.169893 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"]
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.336870 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89r48\" (UniqueName: \"kubernetes.io/projected/a578cf17-945b-4b1e-a4a1-3b14c35d4115-kube-api-access-89r48\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.337160 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a578cf17-945b-4b1e-a4a1-3b14c35d4115-secret-volume\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.337313 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a578cf17-945b-4b1e-a4a1-3b14c35d4115-config-volume\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.439413 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89r48\" (UniqueName: \"kubernetes.io/projected/a578cf17-945b-4b1e-a4a1-3b14c35d4115-kube-api-access-89r48\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.439465 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a578cf17-945b-4b1e-a4a1-3b14c35d4115-secret-volume\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.439500 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a578cf17-945b-4b1e-a4a1-3b14c35d4115-config-volume\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"
Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.440530 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a578cf17-945b-4b1e-a4a1-3b14c35d4115-config-volume\") pod 
\"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.455194 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a578cf17-945b-4b1e-a4a1-3b14c35d4115-secret-volume\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.469681 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89r48\" (UniqueName: \"kubernetes.io/projected/a578cf17-945b-4b1e-a4a1-3b14c35d4115-kube-api-access-89r48\") pod \"collect-profiles-29520870-kqrnx\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.488681 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" Feb 16 14:30:00 crc kubenswrapper[4816]: I0216 14:30:00.934406 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"] Feb 16 14:30:00 crc kubenswrapper[4816]: W0216 14:30:00.959267 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda578cf17_945b_4b1e_a4a1_3b14c35d4115.slice/crio-c91f3ef32b1df421558f9a72da51a12844b8a2f7edbacfe4906acefd47a2e2fd WatchSource:0}: Error finding container c91f3ef32b1df421558f9a72da51a12844b8a2f7edbacfe4906acefd47a2e2fd: Status 404 returned error can't find the container with id c91f3ef32b1df421558f9a72da51a12844b8a2f7edbacfe4906acefd47a2e2fd Feb 16 14:30:01 crc kubenswrapper[4816]: I0216 14:30:01.475477 4816 generic.go:334] "Generic (PLEG): container finished" podID="a578cf17-945b-4b1e-a4a1-3b14c35d4115" containerID="0a4df06fbeb1564cef27a8a31a5c0e168c5cb5ee0964133883ea23ab4452e508" exitCode=0 Feb 16 14:30:01 crc kubenswrapper[4816]: I0216 14:30:01.475593 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" event={"ID":"a578cf17-945b-4b1e-a4a1-3b14c35d4115","Type":"ContainerDied","Data":"0a4df06fbeb1564cef27a8a31a5c0e168c5cb5ee0964133883ea23ab4452e508"} Feb 16 14:30:01 crc kubenswrapper[4816]: I0216 14:30:01.475787 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" event={"ID":"a578cf17-945b-4b1e-a4a1-3b14c35d4115","Type":"ContainerStarted","Data":"c91f3ef32b1df421558f9a72da51a12844b8a2f7edbacfe4906acefd47a2e2fd"} Feb 16 14:30:02 crc kubenswrapper[4816]: I0216 14:30:02.804066 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.013555 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a578cf17-945b-4b1e-a4a1-3b14c35d4115-config-volume" (OuterVolumeSpecName: "config-volume") pod "a578cf17-945b-4b1e-a4a1-3b14c35d4115" (UID: "a578cf17-945b-4b1e-a4a1-3b14c35d4115"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.012725 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a578cf17-945b-4b1e-a4a1-3b14c35d4115-config-volume\") pod \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.013690 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a578cf17-945b-4b1e-a4a1-3b14c35d4115-secret-volume\") pod \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.014432 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89r48\" (UniqueName: \"kubernetes.io/projected/a578cf17-945b-4b1e-a4a1-3b14c35d4115-kube-api-access-89r48\") pod \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\" (UID: \"a578cf17-945b-4b1e-a4a1-3b14c35d4115\") " Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.014707 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a578cf17-945b-4b1e-a4a1-3b14c35d4115-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.019370 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a578cf17-945b-4b1e-a4a1-3b14c35d4115-kube-api-access-89r48" (OuterVolumeSpecName: "kube-api-access-89r48") pod "a578cf17-945b-4b1e-a4a1-3b14c35d4115" (UID: "a578cf17-945b-4b1e-a4a1-3b14c35d4115"). InnerVolumeSpecName "kube-api-access-89r48". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.022484 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a578cf17-945b-4b1e-a4a1-3b14c35d4115-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a578cf17-945b-4b1e-a4a1-3b14c35d4115" (UID: "a578cf17-945b-4b1e-a4a1-3b14c35d4115"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.115687 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89r48\" (UniqueName: \"kubernetes.io/projected/a578cf17-945b-4b1e-a4a1-3b14c35d4115-kube-api-access-89r48\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.115728 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a578cf17-945b-4b1e-a4a1-3b14c35d4115-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.489323 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" event={"ID":"a578cf17-945b-4b1e-a4a1-3b14c35d4115","Type":"ContainerDied","Data":"c91f3ef32b1df421558f9a72da51a12844b8a2f7edbacfe4906acefd47a2e2fd"} Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.489376 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c91f3ef32b1df421558f9a72da51a12844b8a2f7edbacfe4906acefd47a2e2fd" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.489377 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx" Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.872154 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn"] Feb 16 14:30:03 crc kubenswrapper[4816]: I0216 14:30:03.881087 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520825-hjbbn"] Feb 16 14:30:05 crc kubenswrapper[4816]: I0216 14:30:05.408688 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be5767d1-220b-450a-ba86-ac360d4707e4" path="/var/lib/kubelet/pods/be5767d1-220b-450a-ba86-ac360d4707e4/volumes" Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.823416 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fqh9s"] Feb 16 14:30:26 crc kubenswrapper[4816]: E0216 14:30:26.824262 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a578cf17-945b-4b1e-a4a1-3b14c35d4115" containerName="collect-profiles" Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.824281 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a578cf17-945b-4b1e-a4a1-3b14c35d4115" containerName="collect-profiles" Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.824450 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a578cf17-945b-4b1e-a4a1-3b14c35d4115" containerName="collect-profiles" Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.826640 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.864623 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fqh9s"] Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.930225 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-catalog-content\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.930277 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdf9m\" (UniqueName: \"kubernetes.io/projected/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-kube-api-access-qdf9m\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:26 crc kubenswrapper[4816]: I0216 14:30:26.930326 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-utilities\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.032330 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-utilities\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: 
I0216 14:30:27.032477 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-catalog-content\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.032526 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdf9m\" (UniqueName: \"kubernetes.io/projected/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-kube-api-access-qdf9m\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.032802 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-utilities\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.033096 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-catalog-content\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.052199 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdf9m\" (UniqueName: \"kubernetes.io/projected/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-kube-api-access-qdf9m\") pod \"certified-operators-fqh9s\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.199674 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.367601 4816 scope.go:117] "RemoveContainer" containerID="cfa94495e332b1246273a57aa51fab612d4d2e7922b556b917fd591641968d1a" Feb 16 14:30:27 crc kubenswrapper[4816]: I0216 14:30:27.805751 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fqh9s"] Feb 16 14:30:28 crc kubenswrapper[4816]: I0216 14:30:28.709359 4816 generic.go:334] "Generic (PLEG): container finished" podID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerID="fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68" exitCode=0 Feb 16 14:30:28 crc kubenswrapper[4816]: I0216 14:30:28.709424 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqh9s" event={"ID":"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf","Type":"ContainerDied","Data":"fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68"} Feb 16 14:30:28 crc kubenswrapper[4816]: I0216 14:30:28.709761 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqh9s" event={"ID":"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf","Type":"ContainerStarted","Data":"9e69aece2f455e94ad3fd263e04dde976a801aeacdf2316c6858a5de241b968a"} Feb 16 14:30:28 crc kubenswrapper[4816]: I0216 14:30:28.711645 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:30:30 crc kubenswrapper[4816]: I0216 14:30:30.726083 4816 generic.go:334] "Generic (PLEG): container finished" podID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerID="bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a" exitCode=0 Feb 16 14:30:30 crc kubenswrapper[4816]: I0216 14:30:30.726132 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqh9s" event={"ID":"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf","Type":"ContainerDied","Data":"bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a"} Feb 16 14:30:31 crc kubenswrapper[4816]: I0216 14:30:31.745312 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqh9s" event={"ID":"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf","Type":"ContainerStarted","Data":"8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c"} Feb 16 14:30:31 crc kubenswrapper[4816]: I0216 14:30:31.764503 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fqh9s" podStartSLOduration=3.217991003 podStartE2EDuration="5.76448359s" podCreationTimestamp="2026-02-16 14:30:26 +0000 UTC" firstStartedPulling="2026-02-16 14:30:28.711368024 +0000 UTC m=+5228.038081742" lastFinishedPulling="2026-02-16 14:30:31.257860601 +0000 UTC m=+5230.584574329" observedRunningTime="2026-02-16 14:30:31.764231683 +0000 UTC m=+5231.090945421" watchObservedRunningTime="2026-02-16 14:30:31.76448359 +0000 UTC m=+5231.091197318" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.304614 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-z9gf4"] Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.305765 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.314312 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8983-account-create-update-c6djt"] Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.315331 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.317292 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.326073 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-z9gf4"] Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.349451 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8983-account-create-update-c6djt"] Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.425632 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-operator-scripts\") pod \"barbican-8983-account-create-update-c6djt\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.425686 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpfwt\" (UniqueName: \"kubernetes.io/projected/842ab8ea-5656-4bb5-8553-db49b36e48d2-kube-api-access-tpfwt\") pod \"barbican-db-create-z9gf4\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.425743 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842ab8ea-5656-4bb5-8553-db49b36e48d2-operator-scripts\") pod \"barbican-db-create-z9gf4\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.425771 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jhdq\" (UniqueName: \"kubernetes.io/projected/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-kube-api-access-5jhdq\") pod \"barbican-8983-account-create-update-c6djt\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.527604 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-operator-scripts\") pod \"barbican-8983-account-create-update-c6djt\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.527646 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpfwt\" (UniqueName: \"kubernetes.io/projected/842ab8ea-5656-4bb5-8553-db49b36e48d2-kube-api-access-tpfwt\") pod \"barbican-db-create-z9gf4\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.527705 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842ab8ea-5656-4bb5-8553-db49b36e48d2-operator-scripts\") pod \"barbican-db-create-z9gf4\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.527742 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jhdq\" (UniqueName: \"kubernetes.io/projected/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-kube-api-access-5jhdq\") pod \"barbican-8983-account-create-update-c6djt\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.528756 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-operator-scripts\") pod \"barbican-8983-account-create-update-c6djt\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.528888 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842ab8ea-5656-4bb5-8553-db49b36e48d2-operator-scripts\") pod \"barbican-db-create-z9gf4\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.546954 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpfwt\" (UniqueName: \"kubernetes.io/projected/842ab8ea-5656-4bb5-8553-db49b36e48d2-kube-api-access-tpfwt\") pod \"barbican-db-create-z9gf4\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.555728 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jhdq\" (UniqueName: \"kubernetes.io/projected/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-kube-api-access-5jhdq\") pod \"barbican-8983-account-create-update-c6djt\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.626316 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:32 crc kubenswrapper[4816]: I0216 14:30:32.648458 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.093047 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-z9gf4"] Feb 16 14:30:33 crc kubenswrapper[4816]: W0216 14:30:33.095228 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod842ab8ea_5656_4bb5_8553_db49b36e48d2.slice/crio-0a6d7f572e03c1808ad162ba945240452aa304cb4335fa25c1baf3d05e6cce30 WatchSource:0}: Error finding container 0a6d7f572e03c1808ad162ba945240452aa304cb4335fa25c1baf3d05e6cce30: Status 404 returned error can't find the container with id 0a6d7f572e03c1808ad162ba945240452aa304cb4335fa25c1baf3d05e6cce30 Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.153476 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8983-account-create-update-c6djt"] Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.768913 4816 generic.go:334] "Generic (PLEG): container finished" podID="e6a37227-d45e-4faa-99a5-9a5d9e8ed031" containerID="405bf524d4286d584d4870fdb842524905a41069c2513add28e4557071d1efec" exitCode=0 Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.769021 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8983-account-create-update-c6djt" event={"ID":"e6a37227-d45e-4faa-99a5-9a5d9e8ed031","Type":"ContainerDied","Data":"405bf524d4286d584d4870fdb842524905a41069c2513add28e4557071d1efec"} Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.769059 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8983-account-create-update-c6djt" event={"ID":"e6a37227-d45e-4faa-99a5-9a5d9e8ed031","Type":"ContainerStarted","Data":"5b80a7ba645c049cf0868c9016e47a4ed2038d6310bb97cb25c04dc52fbfbeba"} Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.771120 4816 generic.go:334] "Generic (PLEG): container finished" podID="842ab8ea-5656-4bb5-8553-db49b36e48d2" containerID="887a2181add8831400b68ef95898bb744445170a2e115b7be3df4a78e65e2730" exitCode=0 Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.771159 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-z9gf4" event={"ID":"842ab8ea-5656-4bb5-8553-db49b36e48d2","Type":"ContainerDied","Data":"887a2181add8831400b68ef95898bb744445170a2e115b7be3df4a78e65e2730"} Feb 16 14:30:33 crc kubenswrapper[4816]: I0216 14:30:33.771197 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-z9gf4" event={"ID":"842ab8ea-5656-4bb5-8553-db49b36e48d2","Type":"ContainerStarted","Data":"0a6d7f572e03c1808ad162ba945240452aa304cb4335fa25c1baf3d05e6cce30"} Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.161131 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.168058 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.276102 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jhdq\" (UniqueName: \"kubernetes.io/projected/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-kube-api-access-5jhdq\") pod \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.276480 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpfwt\" (UniqueName: \"kubernetes.io/projected/842ab8ea-5656-4bb5-8553-db49b36e48d2-kube-api-access-tpfwt\") pod \"842ab8ea-5656-4bb5-8553-db49b36e48d2\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.276536 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-operator-scripts\") pod \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\" (UID: \"e6a37227-d45e-4faa-99a5-9a5d9e8ed031\") " Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.276635 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842ab8ea-5656-4bb5-8553-db49b36e48d2-operator-scripts\") pod \"842ab8ea-5656-4bb5-8553-db49b36e48d2\" (UID: \"842ab8ea-5656-4bb5-8553-db49b36e48d2\") " Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.278292 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e6a37227-d45e-4faa-99a5-9a5d9e8ed031" (UID: "e6a37227-d45e-4faa-99a5-9a5d9e8ed031"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.278398 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/842ab8ea-5656-4bb5-8553-db49b36e48d2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "842ab8ea-5656-4bb5-8553-db49b36e48d2" (UID: "842ab8ea-5656-4bb5-8553-db49b36e48d2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.291834 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-kube-api-access-5jhdq" (OuterVolumeSpecName: "kube-api-access-5jhdq") pod "e6a37227-d45e-4faa-99a5-9a5d9e8ed031" (UID: "e6a37227-d45e-4faa-99a5-9a5d9e8ed031"). InnerVolumeSpecName "kube-api-access-5jhdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.291906 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/842ab8ea-5656-4bb5-8553-db49b36e48d2-kube-api-access-tpfwt" (OuterVolumeSpecName: "kube-api-access-tpfwt") pod "842ab8ea-5656-4bb5-8553-db49b36e48d2" (UID: "842ab8ea-5656-4bb5-8553-db49b36e48d2"). InnerVolumeSpecName "kube-api-access-tpfwt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.378920 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842ab8ea-5656-4bb5-8553-db49b36e48d2-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.378979 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jhdq\" (UniqueName: \"kubernetes.io/projected/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-kube-api-access-5jhdq\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.379098 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpfwt\" (UniqueName: \"kubernetes.io/projected/842ab8ea-5656-4bb5-8553-db49b36e48d2-kube-api-access-tpfwt\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.379122 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6a37227-d45e-4faa-99a5-9a5d9e8ed031-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.788785 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8983-account-create-update-c6djt" event={"ID":"e6a37227-d45e-4faa-99a5-9a5d9e8ed031","Type":"ContainerDied","Data":"5b80a7ba645c049cf0868c9016e47a4ed2038d6310bb97cb25c04dc52fbfbeba"} Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.788853 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b80a7ba645c049cf0868c9016e47a4ed2038d6310bb97cb25c04dc52fbfbeba" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.788799 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8983-account-create-update-c6djt" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.791128 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-z9gf4" event={"ID":"842ab8ea-5656-4bb5-8553-db49b36e48d2","Type":"ContainerDied","Data":"0a6d7f572e03c1808ad162ba945240452aa304cb4335fa25c1baf3d05e6cce30"} Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.791229 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-z9gf4" Feb 16 14:30:35 crc kubenswrapper[4816]: I0216 14:30:35.791677 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a6d7f572e03c1808ad162ba945240452aa304cb4335fa25c1baf3d05e6cce30" Feb 16 14:30:36 crc kubenswrapper[4816]: I0216 14:30:36.941070 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:30:36 crc kubenswrapper[4816]: I0216 14:30:36.941159 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.201801 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.201858 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.243111 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.682197 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-k45zc"] Feb 16 14:30:37 crc kubenswrapper[4816]: E0216 14:30:37.683077 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a37227-d45e-4faa-99a5-9a5d9e8ed031" containerName="mariadb-account-create-update" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.683112 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a37227-d45e-4faa-99a5-9a5d9e8ed031" containerName="mariadb-account-create-update" Feb 16 14:30:37 crc kubenswrapper[4816]: E0216 14:30:37.683136 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="842ab8ea-5656-4bb5-8553-db49b36e48d2" containerName="mariadb-database-create" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.683148 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="842ab8ea-5656-4bb5-8553-db49b36e48d2" containerName="mariadb-database-create" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.683369 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6a37227-d45e-4faa-99a5-9a5d9e8ed031" containerName="mariadb-account-create-update" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.683395 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="842ab8ea-5656-4bb5-8553-db49b36e48d2" containerName="mariadb-database-create" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.684121 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.686178 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-tk6m8" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.688930 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.701221 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-k45zc"] Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.823724 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-combined-ca-bundle\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.823806 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrv7w\" (UniqueName: \"kubernetes.io/projected/15fb76bc-c799-466a-9a76-19316fa40857-kube-api-access-jrv7w\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.823853 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-db-sync-config-data\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.854558 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.904320 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fqh9s"] Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.925385 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-combined-ca-bundle\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.925449 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrv7w\" (UniqueName: \"kubernetes.io/projected/15fb76bc-c799-466a-9a76-19316fa40857-kube-api-access-jrv7w\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.925485 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-db-sync-config-data\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.930434 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-db-sync-config-data\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.930908 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-combined-ca-bundle\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:37 crc kubenswrapper[4816]: I0216 14:30:37.951913 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrv7w\" (UniqueName: \"kubernetes.io/projected/15fb76bc-c799-466a-9a76-19316fa40857-kube-api-access-jrv7w\") pod \"barbican-db-sync-k45zc\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:38 crc kubenswrapper[4816]: I0216 14:30:38.058846 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:38 crc kubenswrapper[4816]: I0216 14:30:38.597194 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-k45zc"] Feb 16 14:30:38 crc kubenswrapper[4816]: I0216 14:30:38.818844 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-k45zc" event={"ID":"15fb76bc-c799-466a-9a76-19316fa40857","Type":"ContainerStarted","Data":"d5477e971c03b576fbfcb0274378b2b57284409fe81d9b2edc43a2688022ff2f"} Feb 16 14:30:38 crc kubenswrapper[4816]: I0216 14:30:38.818890 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-k45zc" event={"ID":"15fb76bc-c799-466a-9a76-19316fa40857","Type":"ContainerStarted","Data":"51f007b5de9add45aef7b4d6cc0b96da7df0c446783e1ee72f18f8e46b5f8b1d"} Feb 16 14:30:38 crc kubenswrapper[4816]: I0216 14:30:38.843412 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-k45zc" podStartSLOduration=1.8433860119999999 podStartE2EDuration="1.843386012s" podCreationTimestamp="2026-02-16 14:30:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:30:38.833006419 +0000 UTC m=+5238.159720147" watchObservedRunningTime="2026-02-16 14:30:38.843386012 +0000 UTC m=+5238.170099740" Feb 16 14:30:39 crc kubenswrapper[4816]: I0216 14:30:39.824754 4816 generic.go:334] "Generic (PLEG): container finished" podID="15fb76bc-c799-466a-9a76-19316fa40857" containerID="d5477e971c03b576fbfcb0274378b2b57284409fe81d9b2edc43a2688022ff2f" exitCode=0 Feb 16 14:30:39 crc kubenswrapper[4816]: I0216 14:30:39.824880 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-k45zc" event={"ID":"15fb76bc-c799-466a-9a76-19316fa40857","Type":"ContainerDied","Data":"d5477e971c03b576fbfcb0274378b2b57284409fe81d9b2edc43a2688022ff2f"} Feb 16 14:30:39 crc kubenswrapper[4816]: I0216 14:30:39.825292 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fqh9s" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="registry-server" containerID="cri-o://8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c" gracePeriod=2 Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.304828 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.476129 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdf9m\" (UniqueName: \"kubernetes.io/projected/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-kube-api-access-qdf9m\") pod \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.476325 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-utilities\") pod \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.476380 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-catalog-content\") pod \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\" (UID: \"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf\") " Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.477233 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-utilities" (OuterVolumeSpecName: "utilities") pod "e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" (UID: "e3ca06b0-94a0-44c6-a5f6-335c1b711aaf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.486906 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-kube-api-access-qdf9m" (OuterVolumeSpecName: "kube-api-access-qdf9m") pod "e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" (UID: "e3ca06b0-94a0-44c6-a5f6-335c1b711aaf"). InnerVolumeSpecName "kube-api-access-qdf9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.579454 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.579537 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdf9m\" (UniqueName: \"kubernetes.io/projected/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-kube-api-access-qdf9m\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.624401 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" (UID: "e3ca06b0-94a0-44c6-a5f6-335c1b711aaf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.681231 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.834801 4816 generic.go:334] "Generic (PLEG): container finished" podID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerID="8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c" exitCode=0 Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.834875 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqh9s" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.834921 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqh9s" event={"ID":"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf","Type":"ContainerDied","Data":"8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c"} Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.834964 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqh9s" event={"ID":"e3ca06b0-94a0-44c6-a5f6-335c1b711aaf","Type":"ContainerDied","Data":"9e69aece2f455e94ad3fd263e04dde976a801aeacdf2316c6858a5de241b968a"} Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.834986 4816 scope.go:117] "RemoveContainer" containerID="8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.861928 4816 scope.go:117] "RemoveContainer" containerID="bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.871271 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fqh9s"] Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.878217 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fqh9s"] Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.898085 4816 scope.go:117] "RemoveContainer" containerID="fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.929904 4816 scope.go:117] "RemoveContainer" containerID="8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c" Feb 16 14:30:40 crc kubenswrapper[4816]: E0216 14:30:40.930388 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c\": container with ID starting with 8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c not found: ID does not exist" containerID="8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.930417 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c"} err="failed to get container status \"8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c\": rpc error: code = NotFound desc = could not find container \"8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c\": container with ID starting with 8866c7ec37eeeaa6a305e69e7fe96b3ba40494f8caf72db324f6770da00c7e8c not found: ID does not exist" Feb 16 
14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.930439 4816 scope.go:117] "RemoveContainer" containerID="bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a" Feb 16 14:30:40 crc kubenswrapper[4816]: E0216 14:30:40.930792 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a\": container with ID starting with bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a not found: ID does not exist" containerID="bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.930844 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a"} err="failed to get container status \"bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a\": rpc error: code = NotFound desc = could not find container \"bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a\": container with ID starting with bb577f054a5b5e881ee2087b69fbaaa5cc9c9cbb2f84e3dc322a8bbd25f4a98a not found: ID does not exist" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.930878 4816 scope.go:117] "RemoveContainer" containerID="fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68" Feb 16 14:30:40 crc kubenswrapper[4816]: E0216 14:30:40.931174 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68\": container with ID starting with fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68 not found: ID does not exist" containerID="fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68" Feb 16 14:30:40 crc kubenswrapper[4816]: I0216 14:30:40.931197 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68"} err="failed to get container status \"fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68\": rpc error: code = NotFound desc = could not find container \"fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68\": container with ID starting with fc5687292a9676a74ba946e1b2b0c0b8d029fbecbffa278cad51d7a6b69e1b68 not found: ID does not exist" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.154019 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.198841 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-combined-ca-bundle\") pod \"15fb76bc-c799-466a-9a76-19316fa40857\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.198960 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrv7w\" (UniqueName: \"kubernetes.io/projected/15fb76bc-c799-466a-9a76-19316fa40857-kube-api-access-jrv7w\") pod \"15fb76bc-c799-466a-9a76-19316fa40857\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.199127 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-db-sync-config-data\") pod \"15fb76bc-c799-466a-9a76-19316fa40857\" (UID: \"15fb76bc-c799-466a-9a76-19316fa40857\") " Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.202698 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15fb76bc-c799-466a-9a76-19316fa40857-kube-api-access-jrv7w" (OuterVolumeSpecName: "kube-api-access-jrv7w") pod "15fb76bc-c799-466a-9a76-19316fa40857" (UID: "15fb76bc-c799-466a-9a76-19316fa40857"). InnerVolumeSpecName "kube-api-access-jrv7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.206072 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "15fb76bc-c799-466a-9a76-19316fa40857" (UID: "15fb76bc-c799-466a-9a76-19316fa40857"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.228438 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15fb76bc-c799-466a-9a76-19316fa40857" (UID: "15fb76bc-c799-466a-9a76-19316fa40857"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.300959 4816 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.300988 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb76bc-c799-466a-9a76-19316fa40857-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.300997 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrv7w\" (UniqueName: \"kubernetes.io/projected/15fb76bc-c799-466a-9a76-19316fa40857-kube-api-access-jrv7w\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.416943 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" path="/var/lib/kubelet/pods/e3ca06b0-94a0-44c6-a5f6-335c1b711aaf/volumes" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.847871 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-k45zc" event={"ID":"15fb76bc-c799-466a-9a76-19316fa40857","Type":"ContainerDied","Data":"51f007b5de9add45aef7b4d6cc0b96da7df0c446783e1ee72f18f8e46b5f8b1d"} Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.847915 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51f007b5de9add45aef7b4d6cc0b96da7df0c446783e1ee72f18f8e46b5f8b1d" Feb 16 14:30:41 crc kubenswrapper[4816]: I0216 14:30:41.847930 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-k45zc" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.082637 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-68897db679-vhq8b"] Feb 16 14:30:42 crc kubenswrapper[4816]: E0216 14:30:42.083080 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="extract-content" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.083099 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="extract-content" Feb 16 14:30:42 crc kubenswrapper[4816]: E0216 14:30:42.083140 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="registry-server" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.083148 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="registry-server" Feb 16 14:30:42 crc kubenswrapper[4816]: E0216 14:30:42.083162 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="extract-utilities" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.083171 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="extract-utilities" Feb 16 14:30:42 crc kubenswrapper[4816]: E0216 14:30:42.083191 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15fb76bc-c799-466a-9a76-19316fa40857" containerName="barbican-db-sync" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.083200 4816 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="15fb76bc-c799-466a-9a76-19316fa40857" containerName="barbican-db-sync" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.083391 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="15fb76bc-c799-466a-9a76-19316fa40857" containerName="barbican-db-sync" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.083422 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3ca06b0-94a0-44c6-a5f6-335c1b711aaf" containerName="registry-server" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.084483 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.087542 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.088143 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.088540 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-tk6m8" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.103137 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-c76b849c8-6wvld"] Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.104443 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.113408 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.114849 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-logs\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.114889 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-config-data-custom\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.114926 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p74r2\" (UniqueName: \"kubernetes.io/projected/8fb56667-c039-4934-9c5e-6d4a740b0a7e-kube-api-access-p74r2\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.114944 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-combined-ca-bundle\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.114963 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-config-data-custom\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.115000 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-config-data\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.115039 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-combined-ca-bundle\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.115068 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-config-data\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.115088 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6x2x\" (UniqueName: \"kubernetes.io/projected/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-kube-api-access-w6x2x\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.115252 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb56667-c039-4934-9c5e-6d4a740b0a7e-logs\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.126168 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-68897db679-vhq8b"] Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.142723 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-c76b849c8-6wvld"] Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.195155 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c8df8777c-tnhcx"] Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.206629 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221630 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p74r2\" (UniqueName: \"kubernetes.io/projected/8fb56667-c039-4934-9c5e-6d4a740b0a7e-kube-api-access-p74r2\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221691 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-combined-ca-bundle\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221717 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-dns-svc\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221754 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-config-data-custom\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221793 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvlf7\" (UniqueName: \"kubernetes.io/projected/1fd20d79-5e1a-4123-b2fe-65697e1a4399-kube-api-access-nvlf7\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221861 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-config\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221887 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-config-data\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221958 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-combined-ca-bundle\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.221989 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-config-data\") pod 
\"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.222011 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6x2x\" (UniqueName: \"kubernetes.io/projected/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-kube-api-access-w6x2x\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.222955 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-nb\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.223378 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb56667-c039-4934-9c5e-6d4a740b0a7e-logs\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.223592 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-sb\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.223915 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-logs\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.223960 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-config-data-custom\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.225696 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-logs\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.225949 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb56667-c039-4934-9c5e-6d4a740b0a7e-logs\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.229031 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-combined-ca-bundle\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.231888 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c8df8777c-tnhcx"] Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.239047 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-config-data\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.243112 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-combined-ca-bundle\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.244601 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fb56667-c039-4934-9c5e-6d4a740b0a7e-config-data-custom\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.245452 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p74r2\" (UniqueName: \"kubernetes.io/projected/8fb56667-c039-4934-9c5e-6d4a740b0a7e-kube-api-access-p74r2\") pod \"barbican-worker-68897db679-vhq8b\" (UID: \"8fb56667-c039-4934-9c5e-6d4a740b0a7e\") " pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.248742 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6x2x\" (UniqueName: \"kubernetes.io/projected/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-kube-api-access-w6x2x\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.250419 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-config-data\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.252568 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b-config-data-custom\") pod \"barbican-keystone-listener-c76b849c8-6wvld\" (UID: \"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b\") " pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.325061 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-sb\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: 
\"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.325233 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-dns-svc\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.325980 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-sb\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.326135 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-dns-svc\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.326231 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvlf7\" (UniqueName: \"kubernetes.io/projected/1fd20d79-5e1a-4123-b2fe-65697e1a4399-kube-api-access-nvlf7\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.327539 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-64684f6b78-j5wzl"] Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.327999 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-config\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.333342 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-config\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.334153 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-nb\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.334942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-nb\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.336419 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.345601 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-64684f6b78-j5wzl"] Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.345916 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.346272 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvlf7\" (UniqueName: \"kubernetes.io/projected/1fd20d79-5e1a-4123-b2fe-65697e1a4399-kube-api-access-nvlf7\") pod \"dnsmasq-dns-5c8df8777c-tnhcx\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.405776 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-68897db679-vhq8b" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.426071 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.436536 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-config-data\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.436697 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-combined-ca-bundle\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.436780 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-config-data-custom\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.437199 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jpx7\" (UniqueName: \"kubernetes.io/projected/51985c5a-afd8-4005-a494-403d673f7b2b-kube-api-access-7jpx7\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.437277 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51985c5a-afd8-4005-a494-403d673f7b2b-logs\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.523526 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.538877 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-combined-ca-bundle\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.538946 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-config-data-custom\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.539009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jpx7\" (UniqueName: \"kubernetes.io/projected/51985c5a-afd8-4005-a494-403d673f7b2b-kube-api-access-7jpx7\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.539027 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51985c5a-afd8-4005-a494-403d673f7b2b-logs\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.539051 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-config-data\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.543763 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-config-data-custom\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.544175 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51985c5a-afd8-4005-a494-403d673f7b2b-logs\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.544869 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-config-data\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.544946 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51985c5a-afd8-4005-a494-403d673f7b2b-combined-ca-bundle\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc 
kubenswrapper[4816]: I0216 14:30:42.559982 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jpx7\" (UniqueName: \"kubernetes.io/projected/51985c5a-afd8-4005-a494-403d673f7b2b-kube-api-access-7jpx7\") pod \"barbican-api-64684f6b78-j5wzl\" (UID: \"51985c5a-afd8-4005-a494-403d673f7b2b\") " pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.684608 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.910900 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-68897db679-vhq8b"] Feb 16 14:30:42 crc kubenswrapper[4816]: W0216 14:30:42.916421 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8af86d5_7436_4e20_b9b0_9b39f9f2bd6b.slice/crio-fdb4ae366839a3d6fd4c531d3cc7d9dcafe5e9a24a3a976bdde7fc7e161374f8 WatchSource:0}: Error finding container fdb4ae366839a3d6fd4c531d3cc7d9dcafe5e9a24a3a976bdde7fc7e161374f8: Status 404 returned error can't find the container with id fdb4ae366839a3d6fd4c531d3cc7d9dcafe5e9a24a3a976bdde7fc7e161374f8 Feb 16 14:30:42 crc kubenswrapper[4816]: I0216 14:30:42.917574 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-c76b849c8-6wvld"] Feb 16 14:30:42 crc kubenswrapper[4816]: W0216 14:30:42.922976 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fb56667_c039_4934_9c5e_6d4a740b0a7e.slice/crio-6dde40381761f59ab165aac0122729ce72e7b261cda083ff397beaa9a8a2c566 WatchSource:0}: Error finding container 6dde40381761f59ab165aac0122729ce72e7b261cda083ff397beaa9a8a2c566: Status 404 returned error can't find the container with id 6dde40381761f59ab165aac0122729ce72e7b261cda083ff397beaa9a8a2c566 Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.130408 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c8df8777c-tnhcx"] Feb 16 14:30:43 crc kubenswrapper[4816]: W0216 14:30:43.146113 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fd20d79_5e1a_4123_b2fe_65697e1a4399.slice/crio-9bdff83a0e9890d931bb572fa2ea633be86e2fe8181d715376eb3dbebb064cca WatchSource:0}: Error finding container 9bdff83a0e9890d931bb572fa2ea633be86e2fe8181d715376eb3dbebb064cca: Status 404 returned error can't find the container with id 9bdff83a0e9890d931bb572fa2ea633be86e2fe8181d715376eb3dbebb064cca Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.317474 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-64684f6b78-j5wzl"] Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.892283 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64684f6b78-j5wzl" event={"ID":"51985c5a-afd8-4005-a494-403d673f7b2b","Type":"ContainerStarted","Data":"f2e8fc6de492c34b4b59fa1fcca19b6bb89ba8115484b7b3550855419e68dc69"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.892331 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64684f6b78-j5wzl" event={"ID":"51985c5a-afd8-4005-a494-403d673f7b2b","Type":"ContainerStarted","Data":"1396a260de19ff3ae753ab9f3dbd2c53f5e2338925b2dae95b023f4c15538089"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.892345 
4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64684f6b78-j5wzl" event={"ID":"51985c5a-afd8-4005-a494-403d673f7b2b","Type":"ContainerStarted","Data":"05d7b868e0cd20d9245b151a2e883ace02061cdd9b5acf31117f3be7077277e4"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.892465 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.892703 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.894577 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" event={"ID":"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b","Type":"ContainerStarted","Data":"392d2e3af9a89deabe5a057000e1a1966581b205c2050cc2ba527d14079ecd7b"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.894635 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" event={"ID":"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b","Type":"ContainerStarted","Data":"69d23a44a68cd8e9ab0153bce0ce7292f2935e6c07d81245f7bcaad3e751614a"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.894646 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" event={"ID":"f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b","Type":"ContainerStarted","Data":"fdb4ae366839a3d6fd4c531d3cc7d9dcafe5e9a24a3a976bdde7fc7e161374f8"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.896122 4816 generic.go:334] "Generic (PLEG): container finished" podID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerID="343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb" exitCode=0 Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.896197 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" event={"ID":"1fd20d79-5e1a-4123-b2fe-65697e1a4399","Type":"ContainerDied","Data":"343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.896222 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" event={"ID":"1fd20d79-5e1a-4123-b2fe-65697e1a4399","Type":"ContainerStarted","Data":"9bdff83a0e9890d931bb572fa2ea633be86e2fe8181d715376eb3dbebb064cca"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.898615 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68897db679-vhq8b" event={"ID":"8fb56667-c039-4934-9c5e-6d4a740b0a7e","Type":"ContainerStarted","Data":"58728296a9ba4a443d1f7a9fa5cf4eb0c75c3e9c5289b6cbe65207fa3e456936"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.898642 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68897db679-vhq8b" event={"ID":"8fb56667-c039-4934-9c5e-6d4a740b0a7e","Type":"ContainerStarted","Data":"19b46b89ec157e7e280b1af728bd1991efa0fcc7936b8a793252d9c49531989c"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.898666 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68897db679-vhq8b" event={"ID":"8fb56667-c039-4934-9c5e-6d4a740b0a7e","Type":"ContainerStarted","Data":"6dde40381761f59ab165aac0122729ce72e7b261cda083ff397beaa9a8a2c566"} Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.929717 4816 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/barbican-api-64684f6b78-j5wzl" podStartSLOduration=1.929687494 podStartE2EDuration="1.929687494s" podCreationTimestamp="2026-02-16 14:30:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:30:43.917167213 +0000 UTC m=+5243.243880981" watchObservedRunningTime="2026-02-16 14:30:43.929687494 +0000 UTC m=+5243.256401252" Feb 16 14:30:43 crc kubenswrapper[4816]: I0216 14:30:43.979229 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-68897db679-vhq8b" podStartSLOduration=1.9792077350000001 podStartE2EDuration="1.979207735s" podCreationTimestamp="2026-02-16 14:30:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:30:43.949104594 +0000 UTC m=+5243.275818322" watchObservedRunningTime="2026-02-16 14:30:43.979207735 +0000 UTC m=+5243.305921463" Feb 16 14:30:44 crc kubenswrapper[4816]: I0216 14:30:44.034955 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-c76b849c8-6wvld" podStartSLOduration=2.028631504 podStartE2EDuration="2.028631504s" podCreationTimestamp="2026-02-16 14:30:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:30:44.00906661 +0000 UTC m=+5243.335780358" watchObservedRunningTime="2026-02-16 14:30:44.028631504 +0000 UTC m=+5243.355345242" Feb 16 14:30:44 crc kubenswrapper[4816]: I0216 14:30:44.930467 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" event={"ID":"1fd20d79-5e1a-4123-b2fe-65697e1a4399","Type":"ContainerStarted","Data":"626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f"} Feb 16 14:30:44 crc kubenswrapper[4816]: I0216 14:30:44.951704 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" podStartSLOduration=2.9516706900000003 podStartE2EDuration="2.95167069s" podCreationTimestamp="2026-02-16 14:30:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:30:44.949514831 +0000 UTC m=+5244.276228579" watchObservedRunningTime="2026-02-16 14:30:44.95167069 +0000 UTC m=+5244.278384418" Feb 16 14:30:45 crc kubenswrapper[4816]: I0216 14:30:45.940282 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:51 crc kubenswrapper[4816]: I0216 14:30:51.052098 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-zkvxp"] Feb 16 14:30:51 crc kubenswrapper[4816]: I0216 14:30:51.060874 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-zkvxp"] Feb 16 14:30:51 crc kubenswrapper[4816]: I0216 14:30:51.412379 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d65c1feb-6642-43db-8a3d-35330f524335" path="/var/lib/kubelet/pods/d65c1feb-6642-43db-8a3d-35330f524335/volumes" Feb 16 14:30:52 crc kubenswrapper[4816]: I0216 14:30:52.524810 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:30:52 crc kubenswrapper[4816]: I0216 14:30:52.624705 4816 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/dnsmasq-dns-5dc95c5589-66ksr"] Feb 16 14:30:52 crc kubenswrapper[4816]: I0216 14:30:52.625219 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" podUID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerName="dnsmasq-dns" containerID="cri-o://4c1221ec7a3253500b8f72bb7db628e1cd63a3035b13340a7b6bedbbd634ec4d" gracePeriod=10 Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.008669 4816 generic.go:334] "Generic (PLEG): container finished" podID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerID="4c1221ec7a3253500b8f72bb7db628e1cd63a3035b13340a7b6bedbbd634ec4d" exitCode=0 Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.008725 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" event={"ID":"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e","Type":"ContainerDied","Data":"4c1221ec7a3253500b8f72bb7db628e1cd63a3035b13340a7b6bedbbd634ec4d"} Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.107913 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.257176 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6z9gd\" (UniqueName: \"kubernetes.io/projected/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-kube-api-access-6z9gd\") pod \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.257302 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-nb\") pod \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.257357 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-dns-svc\") pod \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.257408 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-sb\") pod \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.257477 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-config\") pod \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\" (UID: \"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e\") " Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.263378 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-kube-api-access-6z9gd" (OuterVolumeSpecName: "kube-api-access-6z9gd") pod "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" (UID: "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e"). InnerVolumeSpecName "kube-api-access-6z9gd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.298980 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" (UID: "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.300809 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-config" (OuterVolumeSpecName: "config") pod "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" (UID: "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.308202 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" (UID: "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.312449 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" (UID: "1a8e1eb5-cdbf-45d4-b916-bb74d8de949e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.359179 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.359210 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.359221 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.359247 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:53 crc kubenswrapper[4816]: I0216 14:30:53.359258 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6z9gd\" (UniqueName: \"kubernetes.io/projected/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e-kube-api-access-6z9gd\") on node \"crc\" DevicePath \"\"" Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.020678 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" event={"ID":"1a8e1eb5-cdbf-45d4-b916-bb74d8de949e","Type":"ContainerDied","Data":"04ab91e432efd082e9b633525480c3101b775d3bd01e9812a70ad0dfde3f19bc"} Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.021017 4816 scope.go:117] "RemoveContainer" 
containerID="4c1221ec7a3253500b8f72bb7db628e1cd63a3035b13340a7b6bedbbd634ec4d" Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.021145 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dc95c5589-66ksr" Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.076831 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dc95c5589-66ksr"] Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.076912 4816 scope.go:117] "RemoveContainer" containerID="73703f7edc6485d63cfa77a419ba84cba8124dbafaa61f4082e3d6891d650af4" Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.090692 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dc95c5589-66ksr"] Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.112717 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:54 crc kubenswrapper[4816]: I0216 14:30:54.242319 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-64684f6b78-j5wzl" Feb 16 14:30:55 crc kubenswrapper[4816]: I0216 14:30:55.409304 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" path="/var/lib/kubelet/pods/1a8e1eb5-cdbf-45d4-b916-bb74d8de949e/volumes" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.295148 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-bd6tx"] Feb 16 14:31:06 crc kubenswrapper[4816]: E0216 14:31:06.296254 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerName="init" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.296286 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerName="init" Feb 16 14:31:06 crc kubenswrapper[4816]: E0216 14:31:06.296307 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerName="dnsmasq-dns" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.296314 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerName="dnsmasq-dns" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.296563 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a8e1eb5-cdbf-45d4-b916-bb74d8de949e" containerName="dnsmasq-dns" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.297441 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.304628 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bd6tx"] Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.362116 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bca9d4f-055f-4668-915a-a4a35abf5be7-operator-scripts\") pod \"neutron-db-create-bd6tx\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.362217 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnr5d\" (UniqueName: \"kubernetes.io/projected/1bca9d4f-055f-4668-915a-a4a35abf5be7-kube-api-access-jnr5d\") pod \"neutron-db-create-bd6tx\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.387345 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-3207-account-create-update-jlqpt"] Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.390948 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.394260 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.397396 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3207-account-create-update-jlqpt"] Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.463689 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bca9d4f-055f-4668-915a-a4a35abf5be7-operator-scripts\") pod \"neutron-db-create-bd6tx\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.463775 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ec5e35f-3654-40f7-97ad-29ef5097d445-operator-scripts\") pod \"neutron-3207-account-create-update-jlqpt\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.463814 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjs5x\" (UniqueName: \"kubernetes.io/projected/2ec5e35f-3654-40f7-97ad-29ef5097d445-kube-api-access-gjs5x\") pod \"neutron-3207-account-create-update-jlqpt\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.463848 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnr5d\" (UniqueName: \"kubernetes.io/projected/1bca9d4f-055f-4668-915a-a4a35abf5be7-kube-api-access-jnr5d\") pod \"neutron-db-create-bd6tx\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.464504 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bca9d4f-055f-4668-915a-a4a35abf5be7-operator-scripts\") pod \"neutron-db-create-bd6tx\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.481442 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnr5d\" (UniqueName: \"kubernetes.io/projected/1bca9d4f-055f-4668-915a-a4a35abf5be7-kube-api-access-jnr5d\") pod \"neutron-db-create-bd6tx\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.565176 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ec5e35f-3654-40f7-97ad-29ef5097d445-operator-scripts\") pod \"neutron-3207-account-create-update-jlqpt\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.565246 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjs5x\" (UniqueName: \"kubernetes.io/projected/2ec5e35f-3654-40f7-97ad-29ef5097d445-kube-api-access-gjs5x\") pod \"neutron-3207-account-create-update-jlqpt\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.566067 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ec5e35f-3654-40f7-97ad-29ef5097d445-operator-scripts\") pod \"neutron-3207-account-create-update-jlqpt\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.581399 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjs5x\" (UniqueName: \"kubernetes.io/projected/2ec5e35f-3654-40f7-97ad-29ef5097d445-kube-api-access-gjs5x\") pod \"neutron-3207-account-create-update-jlqpt\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.614458 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.711136 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.940689 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:31:06 crc kubenswrapper[4816]: I0216 14:31:06.940791 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:31:07 crc kubenswrapper[4816]: I0216 14:31:07.100137 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bd6tx"] Feb 16 14:31:07 crc kubenswrapper[4816]: W0216 14:31:07.106103 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bca9d4f_055f_4668_915a_a4a35abf5be7.slice/crio-162033d872206e339ee1da8759401a13fdf4b5b289c4beca2cad1fa7a0c5aa4a WatchSource:0}: Error finding container 162033d872206e339ee1da8759401a13fdf4b5b289c4beca2cad1fa7a0c5aa4a: Status 404 returned error can't find the container with id 162033d872206e339ee1da8759401a13fdf4b5b289c4beca2cad1fa7a0c5aa4a Feb 16 14:31:07 crc kubenswrapper[4816]: I0216 14:31:07.208291 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3207-account-create-update-jlqpt"] Feb 16 14:31:07 crc kubenswrapper[4816]: I0216 14:31:07.300142 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3207-account-create-update-jlqpt" event={"ID":"2ec5e35f-3654-40f7-97ad-29ef5097d445","Type":"ContainerStarted","Data":"97195cb8bccf558cfbcd93158787be0408669cf610097dc5547427c70ca40eee"} Feb 16 14:31:07 crc kubenswrapper[4816]: I0216 14:31:07.302859 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bd6tx" event={"ID":"1bca9d4f-055f-4668-915a-a4a35abf5be7","Type":"ContainerStarted","Data":"162033d872206e339ee1da8759401a13fdf4b5b289c4beca2cad1fa7a0c5aa4a"} Feb 16 14:31:08 crc kubenswrapper[4816]: I0216 14:31:08.315318 4816 generic.go:334] "Generic (PLEG): container finished" podID="2ec5e35f-3654-40f7-97ad-29ef5097d445" containerID="95ad177b2ea16b592352bf281ef9cfb4c8bcf81bb6fb1fa75e2eadf11d827fc2" exitCode=0 Feb 16 14:31:08 crc kubenswrapper[4816]: I0216 14:31:08.315898 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3207-account-create-update-jlqpt" event={"ID":"2ec5e35f-3654-40f7-97ad-29ef5097d445","Type":"ContainerDied","Data":"95ad177b2ea16b592352bf281ef9cfb4c8bcf81bb6fb1fa75e2eadf11d827fc2"} Feb 16 14:31:08 crc kubenswrapper[4816]: I0216 14:31:08.319462 4816 generic.go:334] "Generic (PLEG): container finished" podID="1bca9d4f-055f-4668-915a-a4a35abf5be7" containerID="ac814d592aca6efafcbb2056b1ebd67f9c8350f2058884344e2ce5010758b8f0" exitCode=0 Feb 16 14:31:08 crc kubenswrapper[4816]: I0216 14:31:08.319515 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bd6tx" event={"ID":"1bca9d4f-055f-4668-915a-a4a35abf5be7","Type":"ContainerDied","Data":"ac814d592aca6efafcbb2056b1ebd67f9c8350f2058884344e2ce5010758b8f0"} Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.767437 4816 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.777940 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.859447 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjs5x\" (UniqueName: \"kubernetes.io/projected/2ec5e35f-3654-40f7-97ad-29ef5097d445-kube-api-access-gjs5x\") pod \"2ec5e35f-3654-40f7-97ad-29ef5097d445\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.859489 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ec5e35f-3654-40f7-97ad-29ef5097d445-operator-scripts\") pod \"2ec5e35f-3654-40f7-97ad-29ef5097d445\" (UID: \"2ec5e35f-3654-40f7-97ad-29ef5097d445\") " Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.860121 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ec5e35f-3654-40f7-97ad-29ef5097d445-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2ec5e35f-3654-40f7-97ad-29ef5097d445" (UID: "2ec5e35f-3654-40f7-97ad-29ef5097d445"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.864795 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ec5e35f-3654-40f7-97ad-29ef5097d445-kube-api-access-gjs5x" (OuterVolumeSpecName: "kube-api-access-gjs5x") pod "2ec5e35f-3654-40f7-97ad-29ef5097d445" (UID: "2ec5e35f-3654-40f7-97ad-29ef5097d445"). InnerVolumeSpecName "kube-api-access-gjs5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.961105 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bca9d4f-055f-4668-915a-a4a35abf5be7-operator-scripts\") pod \"1bca9d4f-055f-4668-915a-a4a35abf5be7\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.961263 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnr5d\" (UniqueName: \"kubernetes.io/projected/1bca9d4f-055f-4668-915a-a4a35abf5be7-kube-api-access-jnr5d\") pod \"1bca9d4f-055f-4668-915a-a4a35abf5be7\" (UID: \"1bca9d4f-055f-4668-915a-a4a35abf5be7\") " Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.961524 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bca9d4f-055f-4668-915a-a4a35abf5be7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1bca9d4f-055f-4668-915a-a4a35abf5be7" (UID: "1bca9d4f-055f-4668-915a-a4a35abf5be7"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.963115 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjs5x\" (UniqueName: \"kubernetes.io/projected/2ec5e35f-3654-40f7-97ad-29ef5097d445-kube-api-access-gjs5x\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.963152 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ec5e35f-3654-40f7-97ad-29ef5097d445-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.963171 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bca9d4f-055f-4668-915a-a4a35abf5be7-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:09 crc kubenswrapper[4816]: I0216 14:31:09.965747 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bca9d4f-055f-4668-915a-a4a35abf5be7-kube-api-access-jnr5d" (OuterVolumeSpecName: "kube-api-access-jnr5d") pod "1bca9d4f-055f-4668-915a-a4a35abf5be7" (UID: "1bca9d4f-055f-4668-915a-a4a35abf5be7"). InnerVolumeSpecName "kube-api-access-jnr5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:31:10 crc kubenswrapper[4816]: I0216 14:31:10.064874 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnr5d\" (UniqueName: \"kubernetes.io/projected/1bca9d4f-055f-4668-915a-a4a35abf5be7-kube-api-access-jnr5d\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:10 crc kubenswrapper[4816]: I0216 14:31:10.335509 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bd6tx" Feb 16 14:31:10 crc kubenswrapper[4816]: I0216 14:31:10.335501 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bd6tx" event={"ID":"1bca9d4f-055f-4668-915a-a4a35abf5be7","Type":"ContainerDied","Data":"162033d872206e339ee1da8759401a13fdf4b5b289c4beca2cad1fa7a0c5aa4a"} Feb 16 14:31:10 crc kubenswrapper[4816]: I0216 14:31:10.335674 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="162033d872206e339ee1da8759401a13fdf4b5b289c4beca2cad1fa7a0c5aa4a" Feb 16 14:31:10 crc kubenswrapper[4816]: I0216 14:31:10.337134 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3207-account-create-update-jlqpt" event={"ID":"2ec5e35f-3654-40f7-97ad-29ef5097d445","Type":"ContainerDied","Data":"97195cb8bccf558cfbcd93158787be0408669cf610097dc5547427c70ca40eee"} Feb 16 14:31:10 crc kubenswrapper[4816]: I0216 14:31:10.337159 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97195cb8bccf558cfbcd93158787be0408669cf610097dc5547427c70ca40eee" Feb 16 14:31:10 crc kubenswrapper[4816]: I0216 14:31:10.337207 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3207-account-create-update-jlqpt" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.700845 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-ch7fv"] Feb 16 14:31:11 crc kubenswrapper[4816]: E0216 14:31:11.701432 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bca9d4f-055f-4668-915a-a4a35abf5be7" containerName="mariadb-database-create" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.701447 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bca9d4f-055f-4668-915a-a4a35abf5be7" containerName="mariadb-database-create" Feb 16 14:31:11 crc kubenswrapper[4816]: E0216 14:31:11.701465 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ec5e35f-3654-40f7-97ad-29ef5097d445" containerName="mariadb-account-create-update" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.701471 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ec5e35f-3654-40f7-97ad-29ef5097d445" containerName="mariadb-account-create-update" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.701619 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bca9d4f-055f-4668-915a-a4a35abf5be7" containerName="mariadb-database-create" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.701634 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ec5e35f-3654-40f7-97ad-29ef5097d445" containerName="mariadb-account-create-update" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.702150 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ch7fv" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.707140 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-phnph" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.707445 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.707489 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.710475 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-ch7fv"] Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.794480 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-combined-ca-bundle\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.794582 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-config\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv" Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.794618 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h86b2\" (UniqueName: \"kubernetes.io/projected/77203e34-09b4-4000-a542-96fbad2a4eba-kube-api-access-h86b2\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv" Feb 16 14:31:11 crc 
Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.896931 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-config\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv"
Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.897007 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h86b2\" (UniqueName: \"kubernetes.io/projected/77203e34-09b4-4000-a542-96fbad2a4eba-kube-api-access-h86b2\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv"
Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.903570 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-config\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv"
Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.910515 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-combined-ca-bundle\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv"
Feb 16 14:31:11 crc kubenswrapper[4816]: I0216 14:31:11.930387 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h86b2\" (UniqueName: \"kubernetes.io/projected/77203e34-09b4-4000-a542-96fbad2a4eba-kube-api-access-h86b2\") pod \"neutron-db-sync-ch7fv\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " pod="openstack/neutron-db-sync-ch7fv"
Feb 16 14:31:12 crc kubenswrapper[4816]: I0216 14:31:12.023439 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ch7fv"
Feb 16 14:31:12 crc kubenswrapper[4816]: I0216 14:31:12.493013 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-ch7fv"]
Feb 16 14:31:13 crc kubenswrapper[4816]: I0216 14:31:13.361051 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ch7fv" event={"ID":"77203e34-09b4-4000-a542-96fbad2a4eba","Type":"ContainerStarted","Data":"9379f1cfeeb32e5dc1339f40981afc4d2b9c76015b27901908c083ef4d620b14"}
Feb 16 14:31:13 crc kubenswrapper[4816]: I0216 14:31:13.361577 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ch7fv" event={"ID":"77203e34-09b4-4000-a542-96fbad2a4eba","Type":"ContainerStarted","Data":"c5041f2e67bca11ebae9c4fe19c4f30b05e253b5945b2d2f41cd3b2e9ce681a5"}
Feb 16 14:31:13 crc kubenswrapper[4816]: I0216 14:31:13.375545 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-ch7fv" podStartSLOduration=2.375497472 podStartE2EDuration="2.375497472s" podCreationTimestamp="2026-02-16 14:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:31:13.372569062 +0000 UTC m=+5272.699282790" watchObservedRunningTime="2026-02-16 14:31:13.375497472 +0000 UTC m=+5272.702211200"
Feb 16 14:31:16 crc kubenswrapper[4816]: I0216 14:31:16.408366 4816 generic.go:334] "Generic (PLEG): container finished" podID="77203e34-09b4-4000-a542-96fbad2a4eba" containerID="9379f1cfeeb32e5dc1339f40981afc4d2b9c76015b27901908c083ef4d620b14" exitCode=0
Feb 16 14:31:16 crc kubenswrapper[4816]: I0216 14:31:16.409092 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ch7fv" event={"ID":"77203e34-09b4-4000-a542-96fbad2a4eba","Type":"ContainerDied","Data":"9379f1cfeeb32e5dc1339f40981afc4d2b9c76015b27901908c083ef4d620b14"}
Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.726444 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ch7fv"
Need to start a new one" pod="openstack/neutron-db-sync-ch7fv" Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.794538 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-combined-ca-bundle\") pod \"77203e34-09b4-4000-a542-96fbad2a4eba\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.794712 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h86b2\" (UniqueName: \"kubernetes.io/projected/77203e34-09b4-4000-a542-96fbad2a4eba-kube-api-access-h86b2\") pod \"77203e34-09b4-4000-a542-96fbad2a4eba\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.794748 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-config\") pod \"77203e34-09b4-4000-a542-96fbad2a4eba\" (UID: \"77203e34-09b4-4000-a542-96fbad2a4eba\") " Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.803925 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77203e34-09b4-4000-a542-96fbad2a4eba-kube-api-access-h86b2" (OuterVolumeSpecName: "kube-api-access-h86b2") pod "77203e34-09b4-4000-a542-96fbad2a4eba" (UID: "77203e34-09b4-4000-a542-96fbad2a4eba"). InnerVolumeSpecName "kube-api-access-h86b2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.819725 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-config" (OuterVolumeSpecName: "config") pod "77203e34-09b4-4000-a542-96fbad2a4eba" (UID: "77203e34-09b4-4000-a542-96fbad2a4eba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.820042 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77203e34-09b4-4000-a542-96fbad2a4eba" (UID: "77203e34-09b4-4000-a542-96fbad2a4eba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.896309 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h86b2\" (UniqueName: \"kubernetes.io/projected/77203e34-09b4-4000-a542-96fbad2a4eba-kube-api-access-h86b2\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.896342 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:17 crc kubenswrapper[4816]: I0216 14:31:17.896355 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77203e34-09b4-4000-a542-96fbad2a4eba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.426049 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-ch7fv" event={"ID":"77203e34-09b4-4000-a542-96fbad2a4eba","Type":"ContainerDied","Data":"c5041f2e67bca11ebae9c4fe19c4f30b05e253b5945b2d2f41cd3b2e9ce681a5"} Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.426095 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5041f2e67bca11ebae9c4fe19c4f30b05e253b5945b2d2f41cd3b2e9ce681a5" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.426134 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-ch7fv" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.668021 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85949b956f-hk9cq"] Feb 16 14:31:18 crc kubenswrapper[4816]: E0216 14:31:18.668404 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77203e34-09b4-4000-a542-96fbad2a4eba" containerName="neutron-db-sync" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.668428 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="77203e34-09b4-4000-a542-96fbad2a4eba" containerName="neutron-db-sync" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.668677 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="77203e34-09b4-4000-a542-96fbad2a4eba" containerName="neutron-db-sync" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.671395 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.694672 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85949b956f-hk9cq"] Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.785063 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-645bb559fc-bxrrg"] Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.786958 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.793294 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.793552 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.793868 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-phnph" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.812679 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qhbh\" (UniqueName: \"kubernetes.io/projected/4c85c898-6c9c-4435-8885-bcf7fec2ce61-kube-api-access-7qhbh\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.812713 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-sb\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.812763 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.812789 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-nb\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.812915 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-dns-svc\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.813008 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-645bb559fc-bxrrg"] Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.914826 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-combined-ca-bundle\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.915136 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-config\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:18 
crc kubenswrapper[4816]: I0216 14:31:18.915154 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49dk2\" (UniqueName: \"kubernetes.io/projected/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-kube-api-access-49dk2\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.915201 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-dns-svc\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.915238 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qhbh\" (UniqueName: \"kubernetes.io/projected/4c85c898-6c9c-4435-8885-bcf7fec2ce61-kube-api-access-7qhbh\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.915257 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-sb\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.915289 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-httpd-config\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.915359 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.915398 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-nb\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.916034 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-sb\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.916334 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-nb\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.916399 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-dns-svc\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.916771 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.937287 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qhbh\" (UniqueName: \"kubernetes.io/projected/4c85c898-6c9c-4435-8885-bcf7fec2ce61-kube-api-access-7qhbh\") pod \"dnsmasq-dns-85949b956f-hk9cq\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:18 crc kubenswrapper[4816]: I0216 14:31:18.996101 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.017134 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-combined-ca-bundle\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.017179 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-config\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.017204 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49dk2\" (UniqueName: \"kubernetes.io/projected/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-kube-api-access-49dk2\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.017260 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-httpd-config\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.026375 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-httpd-config\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.026431 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-config\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.027265 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-combined-ca-bundle\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.044471 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49dk2\" (UniqueName: \"kubernetes.io/projected/dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e-kube-api-access-49dk2\") pod \"neutron-645bb559fc-bxrrg\" (UID: \"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e\") " pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.109504 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.498375 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85949b956f-hk9cq"] Feb 16 14:31:19 crc kubenswrapper[4816]: I0216 14:31:19.745957 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-645bb559fc-bxrrg"] Feb 16 14:31:19 crc kubenswrapper[4816]: W0216 14:31:19.786524 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc3b9126_7c70_4e95_9f82_0b0d37fa7e1e.slice/crio-c58de599a0d6528c5d6efcedbf3c51324b6fae6b777dc410e3cb316c3b1912a2 WatchSource:0}: Error finding container c58de599a0d6528c5d6efcedbf3c51324b6fae6b777dc410e3cb316c3b1912a2: Status 404 returned error can't find the container with id c58de599a0d6528c5d6efcedbf3c51324b6fae6b777dc410e3cb316c3b1912a2 Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.454466 4816 generic.go:334] "Generic (PLEG): container finished" podID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerID="a3a5e08f84c6eda55fcc61c4f5cc8cd28c2b7270e7a38181203bf0a840d3f269" exitCode=0 Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.454558 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" event={"ID":"4c85c898-6c9c-4435-8885-bcf7fec2ce61","Type":"ContainerDied","Data":"a3a5e08f84c6eda55fcc61c4f5cc8cd28c2b7270e7a38181203bf0a840d3f269"} Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.454876 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" event={"ID":"4c85c898-6c9c-4435-8885-bcf7fec2ce61","Type":"ContainerStarted","Data":"c57d40dddf02e5a6155e9432ebeba96ce8379863b36d9770eab80672cc6e27ac"} Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.457280 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-645bb559fc-bxrrg" event={"ID":"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e","Type":"ContainerStarted","Data":"53bbf407461c3947a7285fc764c55830e69b64e9a556672f58b1ce6b25b0357d"} Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.457316 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-645bb559fc-bxrrg" event={"ID":"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e","Type":"ContainerStarted","Data":"4b24f34e55d76901618362202266e787a56feefaf21aab00d07a85de5383480d"} Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.457325 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-645bb559fc-bxrrg" 
event={"ID":"dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e","Type":"ContainerStarted","Data":"c58de599a0d6528c5d6efcedbf3c51324b6fae6b777dc410e3cb316c3b1912a2"} Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.457552 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:20 crc kubenswrapper[4816]: I0216 14:31:20.515412 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-645bb559fc-bxrrg" podStartSLOduration=2.5153883070000003 podStartE2EDuration="2.515388307s" podCreationTimestamp="2026-02-16 14:31:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:31:20.508984573 +0000 UTC m=+5279.835698311" watchObservedRunningTime="2026-02-16 14:31:20.515388307 +0000 UTC m=+5279.842102035" Feb 16 14:31:21 crc kubenswrapper[4816]: I0216 14:31:21.473315 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" event={"ID":"4c85c898-6c9c-4435-8885-bcf7fec2ce61","Type":"ContainerStarted","Data":"07b6431f88ee7fb2a767d1de1832a7397d6b3f91f0a29a3d910f4e7bc56b9666"} Feb 16 14:31:21 crc kubenswrapper[4816]: I0216 14:31:21.473720 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:21 crc kubenswrapper[4816]: I0216 14:31:21.499986 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" podStartSLOduration=3.4999678530000002 podStartE2EDuration="3.499967853s" podCreationTimestamp="2026-02-16 14:31:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:31:21.495168732 +0000 UTC m=+5280.821882460" watchObservedRunningTime="2026-02-16 14:31:21.499967853 +0000 UTC m=+5280.826681581" Feb 16 14:31:27 crc kubenswrapper[4816]: I0216 14:31:27.491202 4816 scope.go:117] "RemoveContainer" containerID="39f3a936c2cd03eeca933b81d4eb7ea9cf415d3b66d8a3beebe551cb3bdc8266" Feb 16 14:31:28 crc kubenswrapper[4816]: I0216 14:31:28.996906 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.063524 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c8df8777c-tnhcx"] Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.063858 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" podUID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerName="dnsmasq-dns" containerID="cri-o://626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f" gracePeriod=10 Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.553114 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.563207 4816 generic.go:334] "Generic (PLEG): container finished" podID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerID="626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f" exitCode=0 Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.563258 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.563267 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" event={"ID":"1fd20d79-5e1a-4123-b2fe-65697e1a4399","Type":"ContainerDied","Data":"626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f"} Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.563471 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c8df8777c-tnhcx" event={"ID":"1fd20d79-5e1a-4123-b2fe-65697e1a4399","Type":"ContainerDied","Data":"9bdff83a0e9890d931bb572fa2ea633be86e2fe8181d715376eb3dbebb064cca"} Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.563514 4816 scope.go:117] "RemoveContainer" containerID="626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.598256 4816 scope.go:117] "RemoveContainer" containerID="343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.599262 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-dns-svc\") pod \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.599399 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvlf7\" (UniqueName: \"kubernetes.io/projected/1fd20d79-5e1a-4123-b2fe-65697e1a4399-kube-api-access-nvlf7\") pod \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.599451 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-nb\") pod \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.599588 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-sb\") pod \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.599683 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-config\") pod \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\" (UID: \"1fd20d79-5e1a-4123-b2fe-65697e1a4399\") " Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.606288 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd20d79-5e1a-4123-b2fe-65697e1a4399-kube-api-access-nvlf7" (OuterVolumeSpecName: "kube-api-access-nvlf7") pod "1fd20d79-5e1a-4123-b2fe-65697e1a4399" (UID: "1fd20d79-5e1a-4123-b2fe-65697e1a4399"). InnerVolumeSpecName "kube-api-access-nvlf7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.640081 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1fd20d79-5e1a-4123-b2fe-65697e1a4399" (UID: "1fd20d79-5e1a-4123-b2fe-65697e1a4399"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.640150 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-config" (OuterVolumeSpecName: "config") pod "1fd20d79-5e1a-4123-b2fe-65697e1a4399" (UID: "1fd20d79-5e1a-4123-b2fe-65697e1a4399"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.650614 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1fd20d79-5e1a-4123-b2fe-65697e1a4399" (UID: "1fd20d79-5e1a-4123-b2fe-65697e1a4399"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.667418 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1fd20d79-5e1a-4123-b2fe-65697e1a4399" (UID: "1fd20d79-5e1a-4123-b2fe-65697e1a4399"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.701521 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.701566 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.701582 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvlf7\" (UniqueName: \"kubernetes.io/projected/1fd20d79-5e1a-4123-b2fe-65697e1a4399-kube-api-access-nvlf7\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.701598 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.701610 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fd20d79-5e1a-4123-b2fe-65697e1a4399-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.707607 4816 scope.go:117] "RemoveContainer" containerID="626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f" Feb 16 14:31:29 crc kubenswrapper[4816]: E0216 14:31:29.708249 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f\": container 
with ID starting with 626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f not found: ID does not exist" containerID="626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.708289 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f"} err="failed to get container status \"626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f\": rpc error: code = NotFound desc = could not find container \"626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f\": container with ID starting with 626b8574c14c2d46ffb9ec0ecbb8ebd40a92706a2b44c569a6c9ba477a66936f not found: ID does not exist" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.708328 4816 scope.go:117] "RemoveContainer" containerID="343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb" Feb 16 14:31:29 crc kubenswrapper[4816]: E0216 14:31:29.708612 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb\": container with ID starting with 343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb not found: ID does not exist" containerID="343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.708684 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb"} err="failed to get container status \"343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb\": rpc error: code = NotFound desc = could not find container \"343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb\": container with ID starting with 343f3f54bb4c994699c857182f5221a4ebb017c97000f115099f3422c5434dfb not found: ID does not exist" Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.892721 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c8df8777c-tnhcx"] Feb 16 14:31:29 crc kubenswrapper[4816]: I0216 14:31:29.901362 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c8df8777c-tnhcx"] Feb 16 14:31:31 crc kubenswrapper[4816]: I0216 14:31:31.409946 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" path="/var/lib/kubelet/pods/1fd20d79-5e1a-4123-b2fe-65697e1a4399/volumes" Feb 16 14:31:36 crc kubenswrapper[4816]: I0216 14:31:36.941486 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:31:36 crc kubenswrapper[4816]: I0216 14:31:36.942343 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:31:36 crc kubenswrapper[4816]: I0216 14:31:36.942414 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
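[editor's note] The NotFound errors above are benign: by the time the kubelet retries RemoveContainer, CRI-O has already deleted the container, so the status lookup fails and the kubelet moves on. A small sketch of that idempotent-cleanup pattern; the wrapper is hypothetical and not kubelet code, only the gRPC NotFound code matches the log:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats NotFound from the runtime as success:
// the container is already gone, so there is nothing left to delete.
func removeContainer(id string, runtimeRemove func(string) error) error {
	if err := runtimeRemove(id); err != nil {
		if status.Code(err) == codes.NotFound {
			return nil // already removed; cleanup stays idempotent
		}
		return err
	}
	return nil
}

func main() {
	// Fake runtime that always answers NotFound, like CRI-O does above.
	fake := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeContainer("626b8574c14c2d46", fake)) // <nil>
}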
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:31:36 crc kubenswrapper[4816]: I0216 14:31:36.943493 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:31:36 crc kubenswrapper[4816]: I0216 14:31:36.943595 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" gracePeriod=600 Feb 16 14:31:37 crc kubenswrapper[4816]: E0216 14:31:37.071591 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:31:37 crc kubenswrapper[4816]: I0216 14:31:37.626306 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" exitCode=0 Feb 16 14:31:37 crc kubenswrapper[4816]: I0216 14:31:37.626385 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"} Feb 16 14:31:37 crc kubenswrapper[4816]: I0216 14:31:37.626640 4816 scope.go:117] "RemoveContainer" containerID="90e2f0f25ce572784388a1d1c59ee443c4cd8305fe9cbe117e83babcb64c00c6" Feb 16 14:31:37 crc kubenswrapper[4816]: I0216 14:31:37.627386 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:31:37 crc kubenswrapper[4816]: E0216 14:31:37.627619 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:31:48 crc kubenswrapper[4816]: I0216 14:31:48.398862 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:31:48 crc kubenswrapper[4816]: E0216 14:31:48.399758 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:31:49 crc kubenswrapper[4816]: I0216 
14:31:49.118499 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-645bb559fc-bxrrg" Feb 16 14:31:55 crc kubenswrapper[4816]: I0216 14:31:55.953711 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-9jpzt"] Feb 16 14:31:55 crc kubenswrapper[4816]: E0216 14:31:55.955892 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerName="dnsmasq-dns" Feb 16 14:31:55 crc kubenswrapper[4816]: I0216 14:31:55.956021 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerName="dnsmasq-dns" Feb 16 14:31:55 crc kubenswrapper[4816]: E0216 14:31:55.956142 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerName="init" Feb 16 14:31:55 crc kubenswrapper[4816]: I0216 14:31:55.956236 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerName="init" Feb 16 14:31:55 crc kubenswrapper[4816]: I0216 14:31:55.956606 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fd20d79-5e1a-4123-b2fe-65697e1a4399" containerName="dnsmasq-dns" Feb 16 14:31:55 crc kubenswrapper[4816]: I0216 14:31:55.957593 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:55 crc kubenswrapper[4816]: I0216 14:31:55.964881 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-9jpzt"] Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.051154 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-a715-account-create-update-2tzsx"] Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.052444 4816 util.go:30] "No sandbox for pod can be found. 
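[editor's note] The repeated "back-off 5m0s restarting failed container" errors above are CrashLoopBackOff at its cap: the kubelet doubles the restart delay after each crash until it hits a ceiling. The 5m cap is visible in the log itself; the 10s initial delay and doubling factor in the sketch below are the kubelet's defaults, stated here as an assumption rather than read from this log:

package main

import (
	"fmt"
	"time"
)

// backoff returns the crash-loop restart delay after n prior restarts,
// doubling from an initial delay up to a fixed cap.
func backoff(restarts int) time.Duration {
	d := 10 * time.Second // assumed initial delay (kubelet default)
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute // the "back-off 5m0s" seen in the log
		}
	}
	return d
}

func main() {
	for r := 0; r <= 5; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, backoff(r))
	}
}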
Need to start a new one" pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.054644 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.059341 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a715-account-create-update-2tzsx"] Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.062725 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqskm\" (UniqueName: \"kubernetes.io/projected/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-kube-api-access-bqskm\") pod \"glance-db-create-9jpzt\" (UID: \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.062926 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-operator-scripts\") pod \"glance-db-create-9jpzt\" (UID: \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.164433 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpw2r\" (UniqueName: \"kubernetes.io/projected/0f480b29-6c4f-4fb6-806d-9f505e3377f9-kube-api-access-tpw2r\") pod \"glance-a715-account-create-update-2tzsx\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.164517 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-operator-scripts\") pod \"glance-db-create-9jpzt\" (UID: \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.164548 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqskm\" (UniqueName: \"kubernetes.io/projected/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-kube-api-access-bqskm\") pod \"glance-db-create-9jpzt\" (UID: \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.164565 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f480b29-6c4f-4fb6-806d-9f505e3377f9-operator-scripts\") pod \"glance-a715-account-create-update-2tzsx\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.165260 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-operator-scripts\") pod \"glance-db-create-9jpzt\" (UID: \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.183128 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqskm\" (UniqueName: \"kubernetes.io/projected/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-kube-api-access-bqskm\") pod \"glance-db-create-9jpzt\" (UID: 
\"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.266636 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpw2r\" (UniqueName: \"kubernetes.io/projected/0f480b29-6c4f-4fb6-806d-9f505e3377f9-kube-api-access-tpw2r\") pod \"glance-a715-account-create-update-2tzsx\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.266749 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f480b29-6c4f-4fb6-806d-9f505e3377f9-operator-scripts\") pod \"glance-a715-account-create-update-2tzsx\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.267486 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f480b29-6c4f-4fb6-806d-9f505e3377f9-operator-scripts\") pod \"glance-a715-account-create-update-2tzsx\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.276273 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.284132 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpw2r\" (UniqueName: \"kubernetes.io/projected/0f480b29-6c4f-4fb6-806d-9f505e3377f9-kube-api-access-tpw2r\") pod \"glance-a715-account-create-update-2tzsx\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.367049 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.815778 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-9jpzt"] Feb 16 14:31:56 crc kubenswrapper[4816]: I0216 14:31:56.893702 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a715-account-create-update-2tzsx"] Feb 16 14:31:56 crc kubenswrapper[4816]: W0216 14:31:56.900045 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f480b29_6c4f_4fb6_806d_9f505e3377f9.slice/crio-1e7c45f54082bc0b2491fa8f7acf38ff5af2c0903638728e71b2bc5f8fab2cb6 WatchSource:0}: Error finding container 1e7c45f54082bc0b2491fa8f7acf38ff5af2c0903638728e71b2bc5f8fab2cb6: Status 404 returned error can't find the container with id 1e7c45f54082bc0b2491fa8f7acf38ff5af2c0903638728e71b2bc5f8fab2cb6 Feb 16 14:31:57 crc kubenswrapper[4816]: I0216 14:31:57.783677 4816 generic.go:334] "Generic (PLEG): container finished" podID="a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8" containerID="5bb65dda369f1cacdada31c4a52006663090e765d5da790935ffacd22c996e33" exitCode=0 Feb 16 14:31:57 crc kubenswrapper[4816]: I0216 14:31:57.783772 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-9jpzt" event={"ID":"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8","Type":"ContainerDied","Data":"5bb65dda369f1cacdada31c4a52006663090e765d5da790935ffacd22c996e33"} Feb 16 14:31:57 crc kubenswrapper[4816]: I0216 14:31:57.784065 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-9jpzt" event={"ID":"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8","Type":"ContainerStarted","Data":"38a4fabea3e72f447f16ff4746e54e911c12668e04f45c67b01280e7d0c937bc"} Feb 16 14:31:57 crc kubenswrapper[4816]: I0216 14:31:57.787077 4816 generic.go:334] "Generic (PLEG): container finished" podID="0f480b29-6c4f-4fb6-806d-9f505e3377f9" containerID="24789091f3f1fa925ed35e52df49ae01a4b099ffe3f7414a6384f47ca468d0ed" exitCode=0 Feb 16 14:31:57 crc kubenswrapper[4816]: I0216 14:31:57.787116 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a715-account-create-update-2tzsx" event={"ID":"0f480b29-6c4f-4fb6-806d-9f505e3377f9","Type":"ContainerDied","Data":"24789091f3f1fa925ed35e52df49ae01a4b099ffe3f7414a6384f47ca468d0ed"} Feb 16 14:31:57 crc kubenswrapper[4816]: I0216 14:31:57.787137 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a715-account-create-update-2tzsx" event={"ID":"0f480b29-6c4f-4fb6-806d-9f505e3377f9","Type":"ContainerStarted","Data":"1e7c45f54082bc0b2491fa8f7acf38ff5af2c0903638728e71b2bc5f8fab2cb6"} Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.244288 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.253299 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.332264 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqskm\" (UniqueName: \"kubernetes.io/projected/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-kube-api-access-bqskm\") pod \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\" (UID: \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.332346 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpw2r\" (UniqueName: \"kubernetes.io/projected/0f480b29-6c4f-4fb6-806d-9f505e3377f9-kube-api-access-tpw2r\") pod \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.332406 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-operator-scripts\") pod \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\" (UID: \"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8\") " Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.332471 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f480b29-6c4f-4fb6-806d-9f505e3377f9-operator-scripts\") pod \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\" (UID: \"0f480b29-6c4f-4fb6-806d-9f505e3377f9\") " Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.333251 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8" (UID: "a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.333281 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f480b29-6c4f-4fb6-806d-9f505e3377f9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0f480b29-6c4f-4fb6-806d-9f505e3377f9" (UID: "0f480b29-6c4f-4fb6-806d-9f505e3377f9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.338934 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-kube-api-access-bqskm" (OuterVolumeSpecName: "kube-api-access-bqskm") pod "a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8" (UID: "a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8"). InnerVolumeSpecName "kube-api-access-bqskm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.339116 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f480b29-6c4f-4fb6-806d-9f505e3377f9-kube-api-access-tpw2r" (OuterVolumeSpecName: "kube-api-access-tpw2r") pod "0f480b29-6c4f-4fb6-806d-9f505e3377f9" (UID: "0f480b29-6c4f-4fb6-806d-9f505e3377f9"). InnerVolumeSpecName "kube-api-access-tpw2r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.434051 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpw2r\" (UniqueName: \"kubernetes.io/projected/0f480b29-6c4f-4fb6-806d-9f505e3377f9-kube-api-access-tpw2r\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.434081 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.434091 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f480b29-6c4f-4fb6-806d-9f505e3377f9-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.434103 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqskm\" (UniqueName: \"kubernetes.io/projected/a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8-kube-api-access-bqskm\") on node \"crc\" DevicePath \"\"" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.809227 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-9jpzt" event={"ID":"a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8","Type":"ContainerDied","Data":"38a4fabea3e72f447f16ff4746e54e911c12668e04f45c67b01280e7d0c937bc"} Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.809254 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-9jpzt" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.809272 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38a4fabea3e72f447f16ff4746e54e911c12668e04f45c67b01280e7d0c937bc" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.810999 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a715-account-create-update-2tzsx" event={"ID":"0f480b29-6c4f-4fb6-806d-9f505e3377f9","Type":"ContainerDied","Data":"1e7c45f54082bc0b2491fa8f7acf38ff5af2c0903638728e71b2bc5f8fab2cb6"} Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.811033 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e7c45f54082bc0b2491fa8f7acf38ff5af2c0903638728e71b2bc5f8fab2cb6" Feb 16 14:31:59 crc kubenswrapper[4816]: I0216 14:31:59.811039 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a715-account-create-update-2tzsx" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.222615 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-zfnz9"] Feb 16 14:32:01 crc kubenswrapper[4816]: E0216 14:32:01.223325 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f480b29-6c4f-4fb6-806d-9f505e3377f9" containerName="mariadb-account-create-update" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.223339 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f480b29-6c4f-4fb6-806d-9f505e3377f9" containerName="mariadb-account-create-update" Feb 16 14:32:01 crc kubenswrapper[4816]: E0216 14:32:01.223360 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8" containerName="mariadb-database-create" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.223367 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8" containerName="mariadb-database-create" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.223501 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f480b29-6c4f-4fb6-806d-9f505e3377f9" containerName="mariadb-account-create-update" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.223517 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a470d3c9-ccf7-4a83-a2e8-2bd1c1fd46c8" containerName="mariadb-database-create" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.224148 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.226693 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rw8c5" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.226979 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.236094 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zfnz9"] Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.263366 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-combined-ca-bundle\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.263631 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-db-sync-config-data\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.263675 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p4rr\" (UniqueName: \"kubernetes.io/projected/b1910b7f-7be1-47c4-873b-80f10d60bd0d-kube-api-access-2p4rr\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.263723 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-config-data\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.365322 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-combined-ca-bundle\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.365749 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-db-sync-config-data\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.365930 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p4rr\" (UniqueName: \"kubernetes.io/projected/b1910b7f-7be1-47c4-873b-80f10d60bd0d-kube-api-access-2p4rr\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.366115 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-config-data\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.372361 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-combined-ca-bundle\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.381639 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-config-data\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.381672 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-db-sync-config-data\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.394609 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p4rr\" (UniqueName: \"kubernetes.io/projected/b1910b7f-7be1-47c4-873b-80f10d60bd0d-kube-api-access-2p4rr\") pod \"glance-db-sync-zfnz9\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:01 crc kubenswrapper[4816]: I0216 14:32:01.549591 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:02 crc kubenswrapper[4816]: I0216 14:32:02.066114 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zfnz9"] Feb 16 14:32:02 crc kubenswrapper[4816]: I0216 14:32:02.399125 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:32:02 crc kubenswrapper[4816]: E0216 14:32:02.399426 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:32:02 crc kubenswrapper[4816]: I0216 14:32:02.858086 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zfnz9" event={"ID":"b1910b7f-7be1-47c4-873b-80f10d60bd0d","Type":"ContainerStarted","Data":"ac352d9ae6ade149ca9d3bda5c25baa31a2e4c90e6b386a1f2bb352eb18c6e76"} Feb 16 14:32:02 crc kubenswrapper[4816]: I0216 14:32:02.858447 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zfnz9" event={"ID":"b1910b7f-7be1-47c4-873b-80f10d60bd0d","Type":"ContainerStarted","Data":"ae2a21a8772b784293f4f64bf2966dbd0648c085d320370f9ce7dfb1b81a52e9"} Feb 16 14:32:02 crc kubenswrapper[4816]: I0216 14:32:02.883892 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-zfnz9" podStartSLOduration=1.883853109 podStartE2EDuration="1.883853109s" podCreationTimestamp="2026-02-16 14:32:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:02.877349172 +0000 UTC m=+5322.204062900" watchObservedRunningTime="2026-02-16 14:32:02.883853109 +0000 UTC m=+5322.210566837" Feb 16 14:32:05 crc kubenswrapper[4816]: I0216 14:32:05.890576 4816 generic.go:334] "Generic (PLEG): container finished" podID="b1910b7f-7be1-47c4-873b-80f10d60bd0d" containerID="ac352d9ae6ade149ca9d3bda5c25baa31a2e4c90e6b386a1f2bb352eb18c6e76" exitCode=0 Feb 16 14:32:05 crc kubenswrapper[4816]: I0216 14:32:05.890631 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zfnz9" event={"ID":"b1910b7f-7be1-47c4-873b-80f10d60bd0d","Type":"ContainerDied","Data":"ac352d9ae6ade149ca9d3bda5c25baa31a2e4c90e6b386a1f2bb352eb18c6e76"} Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.360063 4816 util.go:48] "No ready sandbox for pod can be found. 
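The pod_startup_latency_tracker entry above carries its own arithmetic: podStartSLOduration is watchObservedRunningTime minus podCreationTimestamp (the pull timestamps are zero because no image pull was needed). A sketch of that subtraction, assuming the timestamps keep Go's default time.Time formatting as seen in the samples:

    package main

    import (
    	"fmt"
    	"time"
    )

    // goTimeLayout matches the default time.Time string form used in these log fields,
    // e.g. "2026-02-16 14:32:02.883853109 +0000 UTC". Fractional seconds are optional on parse.
    const goTimeLayout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func main() {
    	created, _ := time.Parse(goTimeLayout, "2026-02-16 14:32:01 +0000 UTC")
    	observed, _ := time.Parse(goTimeLayout, "2026-02-16 14:32:02.883853109 +0000 UTC")
    	fmt.Println(observed.Sub(created)) // 1.883853109s, matching podStartSLOduration above
    }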
Need to start a new one" pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.463907 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-db-sync-config-data\") pod \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.464051 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p4rr\" (UniqueName: \"kubernetes.io/projected/b1910b7f-7be1-47c4-873b-80f10d60bd0d-kube-api-access-2p4rr\") pod \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.464125 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-config-data\") pod \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.464250 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-combined-ca-bundle\") pod \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\" (UID: \"b1910b7f-7be1-47c4-873b-80f10d60bd0d\") " Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.469806 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b1910b7f-7be1-47c4-873b-80f10d60bd0d" (UID: "b1910b7f-7be1-47c4-873b-80f10d60bd0d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.471876 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1910b7f-7be1-47c4-873b-80f10d60bd0d-kube-api-access-2p4rr" (OuterVolumeSpecName: "kube-api-access-2p4rr") pod "b1910b7f-7be1-47c4-873b-80f10d60bd0d" (UID: "b1910b7f-7be1-47c4-873b-80f10d60bd0d"). InnerVolumeSpecName "kube-api-access-2p4rr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.489526 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1910b7f-7be1-47c4-873b-80f10d60bd0d" (UID: "b1910b7f-7be1-47c4-873b-80f10d60bd0d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.508432 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-config-data" (OuterVolumeSpecName: "config-data") pod "b1910b7f-7be1-47c4-873b-80f10d60bd0d" (UID: "b1910b7f-7be1-47c4-873b-80f10d60bd0d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.566215 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.566249 4816 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.566259 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p4rr\" (UniqueName: \"kubernetes.io/projected/b1910b7f-7be1-47c4-873b-80f10d60bd0d-kube-api-access-2p4rr\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.566272 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1910b7f-7be1-47c4-873b-80f10d60bd0d-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.911611 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zfnz9" event={"ID":"b1910b7f-7be1-47c4-873b-80f10d60bd0d","Type":"ContainerDied","Data":"ae2a21a8772b784293f4f64bf2966dbd0648c085d320370f9ce7dfb1b81a52e9"} Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.911706 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-zfnz9" Feb 16 14:32:07 crc kubenswrapper[4816]: I0216 14:32:07.911719 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae2a21a8772b784293f4f64bf2966dbd0648c085d320370f9ce7dfb1b81a52e9" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.240029 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:08 crc kubenswrapper[4816]: E0216 14:32:08.240393 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1910b7f-7be1-47c4-873b-80f10d60bd0d" containerName="glance-db-sync" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.240414 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1910b7f-7be1-47c4-873b-80f10d60bd0d" containerName="glance-db-sync" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.240674 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1910b7f-7be1-47c4-873b-80f10d60bd0d" containerName="glance-db-sync" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.241581 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.243815 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.243980 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.244100 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rw8c5" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.258223 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.265086 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.277620 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.277971 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-config-data\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.278136 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-scripts\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.278301 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-ceph\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.278407 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-logs\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.278504 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2lx2\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-kube-api-access-k2lx2\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.278676 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.339062 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d9854bc7c-kglmd"] Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.340751 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.363264 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d9854bc7c-kglmd"] Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.385300 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-dns-svc\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.385719 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-ceph\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.386508 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-logs\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.386551 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2lx2\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-kube-api-access-k2lx2\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.386683 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.386930 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-logs\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.388540 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-config\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.388605 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.388686 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-nb\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.388734 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8t9f\" (UniqueName: \"kubernetes.io/projected/f3820e46-12e4-4e66-94f8-e44215149080-kube-api-access-x8t9f\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.388765 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-sb\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.388800 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-config-data\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.388909 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-scripts\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.391146 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.395764 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-scripts\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.402286 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-ceph\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.402889 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-config-data\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.403940 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.410424 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2lx2\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-kube-api-access-k2lx2\") pod \"glance-default-external-api-0\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.490643 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-config\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.490718 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-nb\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.490742 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8t9f\" (UniqueName: \"kubernetes.io/projected/f3820e46-12e4-4e66-94f8-e44215149080-kube-api-access-x8t9f\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.490764 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-sb\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.490848 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-dns-svc\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.491944 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-dns-svc\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.492038 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-config\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: 
\"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.493382 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-nb\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.493627 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-sb\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.509410 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.510910 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.517333 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8t9f\" (UniqueName: \"kubernetes.io/projected/f3820e46-12e4-4e66-94f8-e44215149080-kube-api-access-x8t9f\") pod \"dnsmasq-dns-d9854bc7c-kglmd\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.518101 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.523249 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.571023 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.591904 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.591982 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.592029 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.592069 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-logs\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.592119 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-ceph\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.592141 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spzr9\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-kube-api-access-spzr9\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.592159 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.671053 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.693247 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.693328 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-logs\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.693405 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-ceph\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.693437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spzr9\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-kube-api-access-spzr9\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.693459 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.693504 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.693553 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.698141 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.698277 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: 
I0216 14:32:08.698304 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-logs\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.702090 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.708711 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-ceph\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.714304 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.720503 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spzr9\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-kube-api-access-spzr9\") pod \"glance-default-internal-api-0\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:08 crc kubenswrapper[4816]: I0216 14:32:08.883803 4816 util.go:30] "No sandbox for pod can be found. 
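By this point every volume of glance-default-internal-api-0 has walked the same three phases seen for the other pods: VerifyControllerAttachedVolume, MountVolume started, MountVolume.SetUp succeeded, and only then does sandbox creation proceed. A sketch that groups the SetUp-succeeded entries by pod to recover each pod's mounted-volume set (the regexp follows the log text above; illustrative only):

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    // setupOK captures the volume name (between the escaped quotes in the message)
    // and the pod named in the trailing pod="..." field.
    var setupOK = regexp.MustCompile(`MountVolume.SetUp succeeded for volume \\"([^\\]+)\\".*pod="([^"]+)"`)

    func main() {
    	mounts := map[string][]string{}
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 0, 1<<20), 1<<20)
    	for sc.Scan() {
    		if m := setupOK.FindStringSubmatch(sc.Text()); m != nil {
    			mounts[m[2]] = append(mounts[m[2]], m[1])
    		}
    	}
    	for pod, vols := range mounts {
    		fmt.Println(pod, vols) // e.g. openstack/glance-default-internal-api-0 [httpd-run ... kube-api-access-spzr9]
    	}
    }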
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.125469 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.168215 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d9854bc7c-kglmd"] Feb 16 14:32:09 crc kubenswrapper[4816]: W0216 14:32:09.172440 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3820e46_12e4_4e66_94f8_e44215149080.slice/crio-9ffc3c180136b356cf16bb01b57b7fb8db3ea21134e643d2f18cbbab39fe24a0 WatchSource:0}: Error finding container 9ffc3c180136b356cf16bb01b57b7fb8db3ea21134e643d2f18cbbab39fe24a0: Status 404 returned error can't find the container with id 9ffc3c180136b356cf16bb01b57b7fb8db3ea21134e643d2f18cbbab39fe24a0 Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.370322 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.531649 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.960827 4816 generic.go:334] "Generic (PLEG): container finished" podID="f3820e46-12e4-4e66-94f8-e44215149080" containerID="88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696" exitCode=0 Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.960896 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" event={"ID":"f3820e46-12e4-4e66-94f8-e44215149080","Type":"ContainerDied","Data":"88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696"} Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.961215 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" event={"ID":"f3820e46-12e4-4e66-94f8-e44215149080","Type":"ContainerStarted","Data":"9ffc3c180136b356cf16bb01b57b7fb8db3ea21134e643d2f18cbbab39fe24a0"} Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.968578 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fc882aa3-2fa8-4e9a-949f-f0775b2187ef","Type":"ContainerStarted","Data":"5c2468c6a9f024de3ba99143279ba05146be6e70fe8070edff044cce0ddf4cba"} Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.970408 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c028a051-c893-4f1b-baa8-13a0f157c1b6","Type":"ContainerStarted","Data":"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023"} Feb 16 14:32:09 crc kubenswrapper[4816]: I0216 14:32:09.970462 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c028a051-c893-4f1b-baa8-13a0f157c1b6","Type":"ContainerStarted","Data":"47c9f0097d2cd1ede37bc17132f62d941476b22bbfe97fecc70fd74026006dff"} Feb 16 14:32:10 crc kubenswrapper[4816]: I0216 14:32:10.985215 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fc882aa3-2fa8-4e9a-949f-f0775b2187ef","Type":"ContainerStarted","Data":"4a5a14db9fd89f549531484d1f77109addf8c1856c4044d40b8c3e5d3a118e65"} Feb 16 14:32:10 crc kubenswrapper[4816]: I0216 14:32:10.985631 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"fc882aa3-2fa8-4e9a-949f-f0775b2187ef","Type":"ContainerStarted","Data":"5be4cccda75ed4d4e8513d197c481d6ceb38dcd6ce26a75e1cd316656c7525c9"} Feb 16 14:32:10 crc kubenswrapper[4816]: I0216 14:32:10.988266 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c028a051-c893-4f1b-baa8-13a0f157c1b6","Type":"ContainerStarted","Data":"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55"} Feb 16 14:32:10 crc kubenswrapper[4816]: I0216 14:32:10.988277 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-log" containerID="cri-o://7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023" gracePeriod=30 Feb 16 14:32:10 crc kubenswrapper[4816]: I0216 14:32:10.988383 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-httpd" containerID="cri-o://1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55" gracePeriod=30 Feb 16 14:32:10 crc kubenswrapper[4816]: I0216 14:32:10.993223 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" event={"ID":"f3820e46-12e4-4e66-94f8-e44215149080","Type":"ContainerStarted","Data":"8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf"} Feb 16 14:32:10 crc kubenswrapper[4816]: I0216 14:32:10.993979 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.010051 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.010029137 podStartE2EDuration="3.010029137s" podCreationTimestamp="2026-02-16 14:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:11.007018105 +0000 UTC m=+5330.333731833" watchObservedRunningTime="2026-02-16 14:32:11.010029137 +0000 UTC m=+5330.336742865" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.045454 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.045430783 podStartE2EDuration="3.045430783s" podCreationTimestamp="2026-02-16 14:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:11.030514605 +0000 UTC m=+5330.357228333" watchObservedRunningTime="2026-02-16 14:32:11.045430783 +0000 UTC m=+5330.372144511" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.058559 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" podStartSLOduration=3.05853741 podStartE2EDuration="3.05853741s" podCreationTimestamp="2026-02-16 14:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:11.051849148 +0000 UTC m=+5330.378562886" watchObservedRunningTime="2026-02-16 14:32:11.05853741 +0000 UTC m=+5330.385251138" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.188540 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.563058 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.652296 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-config-data\") pod \"c028a051-c893-4f1b-baa8-13a0f157c1b6\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.652364 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-combined-ca-bundle\") pod \"c028a051-c893-4f1b-baa8-13a0f157c1b6\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.652387 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2lx2\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-kube-api-access-k2lx2\") pod \"c028a051-c893-4f1b-baa8-13a0f157c1b6\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.652447 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-httpd-run\") pod \"c028a051-c893-4f1b-baa8-13a0f157c1b6\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.653095 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-logs\") pod \"c028a051-c893-4f1b-baa8-13a0f157c1b6\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.653140 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-ceph\") pod \"c028a051-c893-4f1b-baa8-13a0f157c1b6\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.653163 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-scripts\") pod \"c028a051-c893-4f1b-baa8-13a0f157c1b6\" (UID: \"c028a051-c893-4f1b-baa8-13a0f157c1b6\") " Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.653015 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c028a051-c893-4f1b-baa8-13a0f157c1b6" (UID: "c028a051-c893-4f1b-baa8-13a0f157c1b6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.653450 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-logs" (OuterVolumeSpecName: "logs") pod "c028a051-c893-4f1b-baa8-13a0f157c1b6" (UID: "c028a051-c893-4f1b-baa8-13a0f157c1b6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.654059 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.654082 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c028a051-c893-4f1b-baa8-13a0f157c1b6-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.657424 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-kube-api-access-k2lx2" (OuterVolumeSpecName: "kube-api-access-k2lx2") pod "c028a051-c893-4f1b-baa8-13a0f157c1b6" (UID: "c028a051-c893-4f1b-baa8-13a0f157c1b6"). InnerVolumeSpecName "kube-api-access-k2lx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.658186 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-ceph" (OuterVolumeSpecName: "ceph") pod "c028a051-c893-4f1b-baa8-13a0f157c1b6" (UID: "c028a051-c893-4f1b-baa8-13a0f157c1b6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.658768 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-scripts" (OuterVolumeSpecName: "scripts") pod "c028a051-c893-4f1b-baa8-13a0f157c1b6" (UID: "c028a051-c893-4f1b-baa8-13a0f157c1b6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.681964 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c028a051-c893-4f1b-baa8-13a0f157c1b6" (UID: "c028a051-c893-4f1b-baa8-13a0f157c1b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.697941 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-config-data" (OuterVolumeSpecName: "config-data") pod "c028a051-c893-4f1b-baa8-13a0f157c1b6" (UID: "c028a051-c893-4f1b-baa8-13a0f157c1b6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.757462 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.757503 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.757549 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2lx2\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-kube-api-access-k2lx2\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.757563 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c028a051-c893-4f1b-baa8-13a0f157c1b6-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:11 crc kubenswrapper[4816]: I0216 14:32:11.757574 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c028a051-c893-4f1b-baa8-13a0f157c1b6-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.002545 4816 generic.go:334] "Generic (PLEG): container finished" podID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerID="1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55" exitCode=0 Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.002586 4816 generic.go:334] "Generic (PLEG): container finished" podID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerID="7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023" exitCode=143 Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.002606 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.002616 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c028a051-c893-4f1b-baa8-13a0f157c1b6","Type":"ContainerDied","Data":"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55"} Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.002673 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c028a051-c893-4f1b-baa8-13a0f157c1b6","Type":"ContainerDied","Data":"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023"} Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.002692 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c028a051-c893-4f1b-baa8-13a0f157c1b6","Type":"ContainerDied","Data":"47c9f0097d2cd1ede37bc17132f62d941476b22bbfe97fecc70fd74026006dff"} Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.002714 4816 scope.go:117] "RemoveContainer" containerID="1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.031922 4816 scope.go:117] "RemoveContainer" containerID="7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.038611 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.049339 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.058617 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:12 crc kubenswrapper[4816]: E0216 14:32:12.059112 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-log" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.059133 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-log" Feb 16 14:32:12 crc kubenswrapper[4816]: E0216 14:32:12.059145 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-httpd" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.059151 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-httpd" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.059317 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-log" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.059343 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" containerName="glance-httpd" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.060389 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.064372 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.067877 4816 scope.go:117] "RemoveContainer" containerID="1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55" Feb 16 14:32:12 crc kubenswrapper[4816]: E0216 14:32:12.068964 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55\": container with ID starting with 1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55 not found: ID does not exist" containerID="1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.069002 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55"} err="failed to get container status \"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55\": rpc error: code = NotFound desc = could not find container \"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55\": container with ID starting with 1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55 not found: ID does not exist" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.069021 4816 scope.go:117] "RemoveContainer" containerID="7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023" Feb 16 14:32:12 crc kubenswrapper[4816]: E0216 14:32:12.069354 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023\": container with ID starting with 7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023 not found: ID does not exist" containerID="7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.069377 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023"} err="failed to get container status \"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023\": rpc error: code = NotFound desc = could not find container \"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023\": container with ID starting with 7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023 not found: ID does not exist" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.069389 4816 scope.go:117] "RemoveContainer" containerID="1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.069637 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55"} err="failed to get container status \"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55\": rpc error: code = NotFound desc = could not find container \"1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55\": container with ID starting with 1f0bba50adc6bf77fe04cd1cf1695012862e13c2dc22ed436a01cd0936d04d55 not found: ID does not exist" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 
14:32:12.069667 4816 scope.go:117] "RemoveContainer" containerID="7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.069837 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023"} err="failed to get container status \"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023\": rpc error: code = NotFound desc = could not find container \"7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023\": container with ID starting with 7bf938995357ff13cb23408ea259b7c7146052b387d8cd7d0921d88d9516e023 not found: ID does not exist" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.071588 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.169092 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.169190 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-config-data\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.169224 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.169375 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-ceph\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.169443 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-scripts\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.169554 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-logs\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.169645 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwlbd\" (UniqueName: 
\"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-kube-api-access-qwlbd\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.271218 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.271494 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-config-data\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.271593 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.271720 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-ceph\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.271845 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-scripts\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.271967 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-logs\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.272064 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwlbd\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-kube-api-access-qwlbd\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.271770 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.272307 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-logs\") pod \"glance-default-external-api-0\" (UID: 
\"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.275528 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-scripts\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.275623 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.275938 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-ceph\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.276241 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-config-data\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.287115 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwlbd\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-kube-api-access-qwlbd\") pod \"glance-default-external-api-0\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.379898 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:32:12 crc kubenswrapper[4816]: I0216 14:32:12.913063 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:32:13 crc kubenswrapper[4816]: I0216 14:32:13.026433 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4f499d0d-a2bc-4c0d-be4c-344346319421","Type":"ContainerStarted","Data":"7868bb6a114b83d90f21bbefc139c568f490d6a786c0f57451eccfad6f052a8c"} Feb 16 14:32:13 crc kubenswrapper[4816]: I0216 14:32:13.028038 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-log" containerID="cri-o://5be4cccda75ed4d4e8513d197c481d6ceb38dcd6ce26a75e1cd316656c7525c9" gracePeriod=30 Feb 16 14:32:13 crc kubenswrapper[4816]: I0216 14:32:13.028083 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-httpd" containerID="cri-o://4a5a14db9fd89f549531484d1f77109addf8c1856c4044d40b8c3e5d3a118e65" gracePeriod=30 Feb 16 14:32:13 crc kubenswrapper[4816]: I0216 14:32:13.845158 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c028a051-c893-4f1b-baa8-13a0f157c1b6" path="/var/lib/kubelet/pods/c028a051-c893-4f1b-baa8-13a0f157c1b6/volumes" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.040564 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4f499d0d-a2bc-4c0d-be4c-344346319421","Type":"ContainerStarted","Data":"5962c18ea0aa4a789e7d1f8743586abb8a592c8960bb2701414b7b3d45c0c5ac"} Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.043993 4816 generic.go:334] "Generic (PLEG): container finished" podID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerID="4a5a14db9fd89f549531484d1f77109addf8c1856c4044d40b8c3e5d3a118e65" exitCode=0 Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.044020 4816 generic.go:334] "Generic (PLEG): container finished" podID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerID="5be4cccda75ed4d4e8513d197c481d6ceb38dcd6ce26a75e1cd316656c7525c9" exitCode=143 Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.044039 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fc882aa3-2fa8-4e9a-949f-f0775b2187ef","Type":"ContainerDied","Data":"4a5a14db9fd89f549531484d1f77109addf8c1856c4044d40b8c3e5d3a118e65"} Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.044062 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fc882aa3-2fa8-4e9a-949f-f0775b2187ef","Type":"ContainerDied","Data":"5be4cccda75ed4d4e8513d197c481d6ceb38dcd6ce26a75e1cd316656c7525c9"} Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.186511 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.310178 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-ceph\") pod \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.310231 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-config-data\") pod \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.310488 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-httpd-run\") pod \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.310546 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-scripts\") pod \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.310593 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-logs\") pod \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.310686 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-combined-ca-bundle\") pod \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.310797 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spzr9\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-kube-api-access-spzr9\") pod \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\" (UID: \"fc882aa3-2fa8-4e9a-949f-f0775b2187ef\") " Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.311136 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-logs" (OuterVolumeSpecName: "logs") pod "fc882aa3-2fa8-4e9a-949f-f0775b2187ef" (UID: "fc882aa3-2fa8-4e9a-949f-f0775b2187ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.311710 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fc882aa3-2fa8-4e9a-949f-f0775b2187ef" (UID: "fc882aa3-2fa8-4e9a-949f-f0775b2187ef"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.315296 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-ceph" (OuterVolumeSpecName: "ceph") pod "fc882aa3-2fa8-4e9a-949f-f0775b2187ef" (UID: "fc882aa3-2fa8-4e9a-949f-f0775b2187ef"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.315423 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-kube-api-access-spzr9" (OuterVolumeSpecName: "kube-api-access-spzr9") pod "fc882aa3-2fa8-4e9a-949f-f0775b2187ef" (UID: "fc882aa3-2fa8-4e9a-949f-f0775b2187ef"). InnerVolumeSpecName "kube-api-access-spzr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.316022 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-scripts" (OuterVolumeSpecName: "scripts") pod "fc882aa3-2fa8-4e9a-949f-f0775b2187ef" (UID: "fc882aa3-2fa8-4e9a-949f-f0775b2187ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.334438 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc882aa3-2fa8-4e9a-949f-f0775b2187ef" (UID: "fc882aa3-2fa8-4e9a-949f-f0775b2187ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.352947 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-config-data" (OuterVolumeSpecName: "config-data") pod "fc882aa3-2fa8-4e9a-949f-f0775b2187ef" (UID: "fc882aa3-2fa8-4e9a-949f-f0775b2187ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.412909 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.412954 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.412967 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.412979 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.412992 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.413003 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:14 crc kubenswrapper[4816]: I0216 14:32:14.413016 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spzr9\" (UniqueName: \"kubernetes.io/projected/fc882aa3-2fa8-4e9a-949f-f0775b2187ef-kube-api-access-spzr9\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.055636 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4f499d0d-a2bc-4c0d-be4c-344346319421","Type":"ContainerStarted","Data":"89ea358324e95e1441e7ee837f0617ffa34e0880a14f0e89e432e8f7c28f9421"} Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.058375 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fc882aa3-2fa8-4e9a-949f-f0775b2187ef","Type":"ContainerDied","Data":"5c2468c6a9f024de3ba99143279ba05146be6e70fe8070edff044cce0ddf4cba"} Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.058444 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.058573 4816 scope.go:117] "RemoveContainer" containerID="4a5a14db9fd89f549531484d1f77109addf8c1856c4044d40b8c3e5d3a118e65" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.080002 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.079978448 podStartE2EDuration="3.079978448s" podCreationTimestamp="2026-02-16 14:32:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:15.075126725 +0000 UTC m=+5334.401840473" watchObservedRunningTime="2026-02-16 14:32:15.079978448 +0000 UTC m=+5334.406692196" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.082434 4816 scope.go:117] "RemoveContainer" containerID="5be4cccda75ed4d4e8513d197c481d6ceb38dcd6ce26a75e1cd316656c7525c9" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.101794 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.108458 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.128194 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:15 crc kubenswrapper[4816]: E0216 14:32:15.128619 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-log" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.128635 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-log" Feb 16 14:32:15 crc kubenswrapper[4816]: E0216 14:32:15.128723 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-httpd" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.128731 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-httpd" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.128883 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-httpd" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.128900 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" containerName="glance-log" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.130318 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.131873 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.141523 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.226562 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.226632 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwx66\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-kube-api-access-jwx66\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.226676 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.226699 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-ceph\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.226807 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.226876 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-logs\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.226901 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.328729 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-logs\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " 
pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.328791 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.328879 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.328930 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwx66\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-kube-api-access-jwx66\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.328953 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.329306 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-logs\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.329352 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-ceph\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.329442 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.329451 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.335256 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.335334 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.337324 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.340603 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-ceph\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.350585 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwx66\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-kube-api-access-jwx66\") pod \"glance-default-internal-api-0\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.422405 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc882aa3-2fa8-4e9a-949f-f0775b2187ef" path="/var/lib/kubelet/pods/fc882aa3-2fa8-4e9a-949f-f0775b2187ef/volumes" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.473250 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:15 crc kubenswrapper[4816]: I0216 14:32:15.991758 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:32:15 crc kubenswrapper[4816]: W0216 14:32:15.995719 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36d355ac_ed74_4343_9ddd_e36e5166df83.slice/crio-d447f4dd67f02dfce175168a28c1762c39d9b8959ad7c930435bc720f8ea4136 WatchSource:0}: Error finding container d447f4dd67f02dfce175168a28c1762c39d9b8959ad7c930435bc720f8ea4136: Status 404 returned error can't find the container with id d447f4dd67f02dfce175168a28c1762c39d9b8959ad7c930435bc720f8ea4136 Feb 16 14:32:16 crc kubenswrapper[4816]: I0216 14:32:16.068266 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36d355ac-ed74-4343-9ddd-e36e5166df83","Type":"ContainerStarted","Data":"d447f4dd67f02dfce175168a28c1762c39d9b8959ad7c930435bc720f8ea4136"} Feb 16 14:32:17 crc kubenswrapper[4816]: I0216 14:32:17.081188 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36d355ac-ed74-4343-9ddd-e36e5166df83","Type":"ContainerStarted","Data":"45901d657d087e3bd7cf2781c50be19b30636d3b0ad263acb29ec6c8e41ea381"} Feb 16 14:32:17 crc kubenswrapper[4816]: I0216 14:32:17.081735 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36d355ac-ed74-4343-9ddd-e36e5166df83","Type":"ContainerStarted","Data":"43413c47567c8111914ba4a1037a4cc0f5821d9898526a33e2c41c621a102d49"} Feb 16 14:32:17 crc kubenswrapper[4816]: I0216 14:32:17.115217 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.115181629 podStartE2EDuration="2.115181629s" podCreationTimestamp="2026-02-16 14:32:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:17.109915477 +0000 UTC m=+5336.436629215" watchObservedRunningTime="2026-02-16 14:32:17.115181629 +0000 UTC m=+5336.441895347" Feb 16 14:32:17 crc kubenswrapper[4816]: I0216 14:32:17.398070 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:32:17 crc kubenswrapper[4816]: E0216 14:32:17.398295 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:32:18 crc kubenswrapper[4816]: I0216 14:32:18.673470 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:18 crc kubenswrapper[4816]: I0216 14:32:18.733897 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85949b956f-hk9cq"] Feb 16 14:32:18 crc kubenswrapper[4816]: I0216 14:32:18.734450 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" 
containerName="dnsmasq-dns" containerID="cri-o://07b6431f88ee7fb2a767d1de1832a7397d6b3f91f0a29a3d910f4e7bc56b9666" gracePeriod=10 Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.135534 4816 generic.go:334] "Generic (PLEG): container finished" podID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerID="07b6431f88ee7fb2a767d1de1832a7397d6b3f91f0a29a3d910f4e7bc56b9666" exitCode=0 Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.135585 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" event={"ID":"4c85c898-6c9c-4435-8885-bcf7fec2ce61","Type":"ContainerDied","Data":"07b6431f88ee7fb2a767d1de1832a7397d6b3f91f0a29a3d910f4e7bc56b9666"} Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.223335 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.301697 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-nb\") pod \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.302055 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config\") pod \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.302135 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-sb\") pod \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.302165 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qhbh\" (UniqueName: \"kubernetes.io/projected/4c85c898-6c9c-4435-8885-bcf7fec2ce61-kube-api-access-7qhbh\") pod \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.302288 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-dns-svc\") pod \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.311620 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c85c898-6c9c-4435-8885-bcf7fec2ce61-kube-api-access-7qhbh" (OuterVolumeSpecName: "kube-api-access-7qhbh") pod "4c85c898-6c9c-4435-8885-bcf7fec2ce61" (UID: "4c85c898-6c9c-4435-8885-bcf7fec2ce61"). InnerVolumeSpecName "kube-api-access-7qhbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.346473 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4c85c898-6c9c-4435-8885-bcf7fec2ce61" (UID: "4c85c898-6c9c-4435-8885-bcf7fec2ce61"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.348955 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4c85c898-6c9c-4435-8885-bcf7fec2ce61" (UID: "4c85c898-6c9c-4435-8885-bcf7fec2ce61"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:19 crc kubenswrapper[4816]: E0216 14:32:19.369300 4816 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config podName:4c85c898-6c9c-4435-8885-bcf7fec2ce61 nodeName:}" failed. No retries permitted until 2026-02-16 14:32:19.86919556 +0000 UTC m=+5339.195909288 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config") pod "4c85c898-6c9c-4435-8885-bcf7fec2ce61" (UID: "4c85c898-6c9c-4435-8885-bcf7fec2ce61") : error deleting /var/lib/kubelet/pods/4c85c898-6c9c-4435-8885-bcf7fec2ce61/volume-subpaths: remove /var/lib/kubelet/pods/4c85c898-6c9c-4435-8885-bcf7fec2ce61/volume-subpaths: no such file or directory Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.369814 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4c85c898-6c9c-4435-8885-bcf7fec2ce61" (UID: "4c85c898-6c9c-4435-8885-bcf7fec2ce61"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.403795 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.403831 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.403843 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.403856 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qhbh\" (UniqueName: \"kubernetes.io/projected/4c85c898-6c9c-4435-8885-bcf7fec2ce61-kube-api-access-7qhbh\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.913203 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config\") pod \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\" (UID: \"4c85c898-6c9c-4435-8885-bcf7fec2ce61\") " Feb 16 14:32:19 crc kubenswrapper[4816]: I0216 14:32:19.914732 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config" (OuterVolumeSpecName: "config") pod "4c85c898-6c9c-4435-8885-bcf7fec2ce61" (UID: "4c85c898-6c9c-4435-8885-bcf7fec2ce61"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:20 crc kubenswrapper[4816]: I0216 14:32:20.015302 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c85c898-6c9c-4435-8885-bcf7fec2ce61-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:20 crc kubenswrapper[4816]: I0216 14:32:20.145087 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" event={"ID":"4c85c898-6c9c-4435-8885-bcf7fec2ce61","Type":"ContainerDied","Data":"c57d40dddf02e5a6155e9432ebeba96ce8379863b36d9770eab80672cc6e27ac"} Feb 16 14:32:20 crc kubenswrapper[4816]: I0216 14:32:20.145181 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" Feb 16 14:32:20 crc kubenswrapper[4816]: I0216 14:32:20.145481 4816 scope.go:117] "RemoveContainer" containerID="07b6431f88ee7fb2a767d1de1832a7397d6b3f91f0a29a3d910f4e7bc56b9666" Feb 16 14:32:20 crc kubenswrapper[4816]: I0216 14:32:20.166170 4816 scope.go:117] "RemoveContainer" containerID="a3a5e08f84c6eda55fcc61c4f5cc8cd28c2b7270e7a38181203bf0a840d3f269" Feb 16 14:32:20 crc kubenswrapper[4816]: I0216 14:32:20.182480 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85949b956f-hk9cq"] Feb 16 14:32:20 crc kubenswrapper[4816]: I0216 14:32:20.190154 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85949b956f-hk9cq"] Feb 16 14:32:21 crc kubenswrapper[4816]: I0216 14:32:21.418890 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" path="/var/lib/kubelet/pods/4c85c898-6c9c-4435-8885-bcf7fec2ce61/volumes" Feb 16 14:32:22 crc kubenswrapper[4816]: I0216 14:32:22.380277 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 14:32:22 crc kubenswrapper[4816]: I0216 14:32:22.381000 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 14:32:22 crc kubenswrapper[4816]: I0216 14:32:22.423893 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 14:32:22 crc kubenswrapper[4816]: I0216 14:32:22.426047 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 14:32:23 crc kubenswrapper[4816]: I0216 14:32:23.174460 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 14:32:23 crc kubenswrapper[4816]: I0216 14:32:23.174510 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 14:32:23 crc kubenswrapper[4816]: I0216 14:32:23.997380 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-85949b956f-hk9cq" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.33:5353: i/o timeout" Feb 16 14:32:25 crc kubenswrapper[4816]: I0216 14:32:25.160879 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 14:32:25 crc kubenswrapper[4816]: I0216 14:32:25.170909 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 14:32:25 crc kubenswrapper[4816]: 
I0216 14:32:25.474316 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:25 crc kubenswrapper[4816]: I0216 14:32:25.474716 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:25 crc kubenswrapper[4816]: I0216 14:32:25.504528 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:25 crc kubenswrapper[4816]: I0216 14:32:25.514195 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:26 crc kubenswrapper[4816]: I0216 14:32:26.208715 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:26 crc kubenswrapper[4816]: I0216 14:32:26.208755 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:28 crc kubenswrapper[4816]: I0216 14:32:28.283771 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:28 crc kubenswrapper[4816]: I0216 14:32:28.284127 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 14:32:30 crc kubenswrapper[4816]: I0216 14:32:30.398277 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:32:30 crc kubenswrapper[4816]: E0216 14:32:30.398896 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.894074 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-lbnxq"] Feb 16 14:32:33 crc kubenswrapper[4816]: E0216 14:32:33.895030 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerName="dnsmasq-dns" Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.895048 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerName="dnsmasq-dns" Feb 16 14:32:33 crc kubenswrapper[4816]: E0216 14:32:33.895085 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerName="init" Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.895091 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerName="init" Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.895263 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c85c898-6c9c-4435-8885-bcf7fec2ce61" containerName="dnsmasq-dns" Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.895870 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.913325 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lbnxq"] Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.997751 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-1890-account-create-update-przbk"] Feb 16 14:32:33 crc kubenswrapper[4816]: I0216 14:32:33.999704 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.002065 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.016254 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1890-account-create-update-przbk"] Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.062845 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxp7p\" (UniqueName: \"kubernetes.io/projected/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-kube-api-access-lxp7p\") pod \"placement-db-create-lbnxq\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.062927 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-operator-scripts\") pod \"placement-db-create-lbnxq\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.164726 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9lwh\" (UniqueName: \"kubernetes.io/projected/0612081d-442a-4110-8a23-8b5654e6a238-kube-api-access-d9lwh\") pod \"placement-1890-account-create-update-przbk\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.164813 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxp7p\" (UniqueName: \"kubernetes.io/projected/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-kube-api-access-lxp7p\") pod \"placement-db-create-lbnxq\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.165036 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-operator-scripts\") pod \"placement-db-create-lbnxq\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.165190 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0612081d-442a-4110-8a23-8b5654e6a238-operator-scripts\") pod \"placement-1890-account-create-update-przbk\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.165833 4816 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-operator-scripts\") pod \"placement-db-create-lbnxq\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.182506 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxp7p\" (UniqueName: \"kubernetes.io/projected/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-kube-api-access-lxp7p\") pod \"placement-db-create-lbnxq\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.217062 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.269621 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0612081d-442a-4110-8a23-8b5654e6a238-operator-scripts\") pod \"placement-1890-account-create-update-przbk\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.269832 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9lwh\" (UniqueName: \"kubernetes.io/projected/0612081d-442a-4110-8a23-8b5654e6a238-kube-api-access-d9lwh\") pod \"placement-1890-account-create-update-przbk\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.270813 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0612081d-442a-4110-8a23-8b5654e6a238-operator-scripts\") pod \"placement-1890-account-create-update-przbk\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.290605 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9lwh\" (UniqueName: \"kubernetes.io/projected/0612081d-442a-4110-8a23-8b5654e6a238-kube-api-access-d9lwh\") pod \"placement-1890-account-create-update-przbk\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.353286 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.720939 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lbnxq"] Feb 16 14:32:34 crc kubenswrapper[4816]: W0216 14:32:34.737965 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb51a0b5_f0ae_4ad6_907c_ccf15813df4f.slice/crio-46b6515c2fd289ed0962be02b207100a5bda1690706f225c1a3c33374ab09ff6 WatchSource:0}: Error finding container 46b6515c2fd289ed0962be02b207100a5bda1690706f225c1a3c33374ab09ff6: Status 404 returned error can't find the container with id 46b6515c2fd289ed0962be02b207100a5bda1690706f225c1a3c33374ab09ff6 Feb 16 14:32:34 crc kubenswrapper[4816]: I0216 14:32:34.848436 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1890-account-create-update-przbk"] Feb 16 14:32:35 crc kubenswrapper[4816]: I0216 14:32:35.298292 4816 generic.go:334] "Generic (PLEG): container finished" podID="0612081d-442a-4110-8a23-8b5654e6a238" containerID="66b324fa5c294672742de09a17c09891c072d91238ef0970cd96af7a8861e62e" exitCode=0 Feb 16 14:32:35 crc kubenswrapper[4816]: I0216 14:32:35.298580 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1890-account-create-update-przbk" event={"ID":"0612081d-442a-4110-8a23-8b5654e6a238","Type":"ContainerDied","Data":"66b324fa5c294672742de09a17c09891c072d91238ef0970cd96af7a8861e62e"} Feb 16 14:32:35 crc kubenswrapper[4816]: I0216 14:32:35.298707 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1890-account-create-update-przbk" event={"ID":"0612081d-442a-4110-8a23-8b5654e6a238","Type":"ContainerStarted","Data":"4add7d772576de9d95b6ebe070cce276e5a0bc7cdc622cfa17d7aea70cc4a881"} Feb 16 14:32:35 crc kubenswrapper[4816]: I0216 14:32:35.302145 4816 generic.go:334] "Generic (PLEG): container finished" podID="db51a0b5-f0ae-4ad6-907c-ccf15813df4f" containerID="6a5d89bfe05b7c8e80ffa3fe765c5fe2f4746ca48c979a2e975a870d10945f64" exitCode=0 Feb 16 14:32:35 crc kubenswrapper[4816]: I0216 14:32:35.302187 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lbnxq" event={"ID":"db51a0b5-f0ae-4ad6-907c-ccf15813df4f","Type":"ContainerDied","Data":"6a5d89bfe05b7c8e80ffa3fe765c5fe2f4746ca48c979a2e975a870d10945f64"} Feb 16 14:32:35 crc kubenswrapper[4816]: I0216 14:32:35.302213 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lbnxq" event={"ID":"db51a0b5-f0ae-4ad6-907c-ccf15813df4f","Type":"ContainerStarted","Data":"46b6515c2fd289ed0962be02b207100a5bda1690706f225c1a3c33374ab09ff6"} Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.725468 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.730786 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.816254 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-operator-scripts\") pod \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.816398 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxp7p\" (UniqueName: \"kubernetes.io/projected/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-kube-api-access-lxp7p\") pod \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\" (UID: \"db51a0b5-f0ae-4ad6-907c-ccf15813df4f\") " Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.816459 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9lwh\" (UniqueName: \"kubernetes.io/projected/0612081d-442a-4110-8a23-8b5654e6a238-kube-api-access-d9lwh\") pod \"0612081d-442a-4110-8a23-8b5654e6a238\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.816518 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0612081d-442a-4110-8a23-8b5654e6a238-operator-scripts\") pod \"0612081d-442a-4110-8a23-8b5654e6a238\" (UID: \"0612081d-442a-4110-8a23-8b5654e6a238\") " Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.817037 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0612081d-442a-4110-8a23-8b5654e6a238-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0612081d-442a-4110-8a23-8b5654e6a238" (UID: "0612081d-442a-4110-8a23-8b5654e6a238"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.817105 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "db51a0b5-f0ae-4ad6-907c-ccf15813df4f" (UID: "db51a0b5-f0ae-4ad6-907c-ccf15813df4f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.821798 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0612081d-442a-4110-8a23-8b5654e6a238-kube-api-access-d9lwh" (OuterVolumeSpecName: "kube-api-access-d9lwh") pod "0612081d-442a-4110-8a23-8b5654e6a238" (UID: "0612081d-442a-4110-8a23-8b5654e6a238"). InnerVolumeSpecName "kube-api-access-d9lwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.821856 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-kube-api-access-lxp7p" (OuterVolumeSpecName: "kube-api-access-lxp7p") pod "db51a0b5-f0ae-4ad6-907c-ccf15813df4f" (UID: "db51a0b5-f0ae-4ad6-907c-ccf15813df4f"). InnerVolumeSpecName "kube-api-access-lxp7p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.917936 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxp7p\" (UniqueName: \"kubernetes.io/projected/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-kube-api-access-lxp7p\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.917971 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9lwh\" (UniqueName: \"kubernetes.io/projected/0612081d-442a-4110-8a23-8b5654e6a238-kube-api-access-d9lwh\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.917981 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0612081d-442a-4110-8a23-8b5654e6a238-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:36 crc kubenswrapper[4816]: I0216 14:32:36.917991 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db51a0b5-f0ae-4ad6-907c-ccf15813df4f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:37 crc kubenswrapper[4816]: I0216 14:32:37.317395 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1890-account-create-update-przbk" Feb 16 14:32:37 crc kubenswrapper[4816]: I0216 14:32:37.317721 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1890-account-create-update-przbk" event={"ID":"0612081d-442a-4110-8a23-8b5654e6a238","Type":"ContainerDied","Data":"4add7d772576de9d95b6ebe070cce276e5a0bc7cdc622cfa17d7aea70cc4a881"} Feb 16 14:32:37 crc kubenswrapper[4816]: I0216 14:32:37.317833 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4add7d772576de9d95b6ebe070cce276e5a0bc7cdc622cfa17d7aea70cc4a881" Feb 16 14:32:37 crc kubenswrapper[4816]: I0216 14:32:37.319161 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lbnxq" event={"ID":"db51a0b5-f0ae-4ad6-907c-ccf15813df4f","Type":"ContainerDied","Data":"46b6515c2fd289ed0962be02b207100a5bda1690706f225c1a3c33374ab09ff6"} Feb 16 14:32:37 crc kubenswrapper[4816]: I0216 14:32:37.319281 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46b6515c2fd289ed0962be02b207100a5bda1690706f225c1a3c33374ab09ff6" Feb 16 14:32:37 crc kubenswrapper[4816]: I0216 14:32:37.319218 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-lbnxq" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.271507 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ddc577c4f-cch6q"] Feb 16 14:32:39 crc kubenswrapper[4816]: E0216 14:32:39.272234 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db51a0b5-f0ae-4ad6-907c-ccf15813df4f" containerName="mariadb-database-create" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.272253 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="db51a0b5-f0ae-4ad6-907c-ccf15813df4f" containerName="mariadb-database-create" Feb 16 14:32:39 crc kubenswrapper[4816]: E0216 14:32:39.272298 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0612081d-442a-4110-8a23-8b5654e6a238" containerName="mariadb-account-create-update" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.272306 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0612081d-442a-4110-8a23-8b5654e6a238" containerName="mariadb-account-create-update" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.272504 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="db51a0b5-f0ae-4ad6-907c-ccf15813df4f" containerName="mariadb-database-create" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.272528 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0612081d-442a-4110-8a23-8b5654e6a238" containerName="mariadb-account-create-update" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.273747 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.284642 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ddc577c4f-cch6q"] Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.312340 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-pqcs9"] Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.318155 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.323121 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-jqswm" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.323459 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.323620 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.328148 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-pqcs9"] Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.363595 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-dns-svc\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.363708 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94h28\" (UniqueName: \"kubernetes.io/projected/c5eb2031-9005-436a-827c-4c3df3b13d75-kube-api-access-94h28\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.363761 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-sb\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.363909 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-config\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.364012 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466132 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-config\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466185 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sncz\" (UniqueName: \"kubernetes.io/projected/e541c102-5476-4214-9555-361d24cef7c9-kube-api-access-2sncz\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 
14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466216 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-combined-ca-bundle\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466261 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466316 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-dns-svc\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466333 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e541c102-5476-4214-9555-361d24cef7c9-logs\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466351 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-config-data\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466370 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94h28\" (UniqueName: \"kubernetes.io/projected/c5eb2031-9005-436a-827c-4c3df3b13d75-kube-api-access-94h28\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466393 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-sb\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.466448 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-scripts\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.467530 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-config\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.468504 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.468550 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-sb\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.468641 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-dns-svc\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.488756 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94h28\" (UniqueName: \"kubernetes.io/projected/c5eb2031-9005-436a-827c-4c3df3b13d75-kube-api-access-94h28\") pod \"dnsmasq-dns-6ddc577c4f-cch6q\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.567797 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e541c102-5476-4214-9555-361d24cef7c9-logs\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.567863 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-config-data\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.567931 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-scripts\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.567987 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sncz\" (UniqueName: \"kubernetes.io/projected/e541c102-5476-4214-9555-361d24cef7c9-kube-api-access-2sncz\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.568028 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-combined-ca-bundle\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.568370 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e541c102-5476-4214-9555-361d24cef7c9-logs\") 
pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.572015 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-scripts\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.574199 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-combined-ca-bundle\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.578424 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-config-data\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.582753 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sncz\" (UniqueName: \"kubernetes.io/projected/e541c102-5476-4214-9555-361d24cef7c9-kube-api-access-2sncz\") pod \"placement-db-sync-pqcs9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.602873 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:39 crc kubenswrapper[4816]: I0216 14:32:39.699764 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:40 crc kubenswrapper[4816]: I0216 14:32:40.232857 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ddc577c4f-cch6q"] Feb 16 14:32:40 crc kubenswrapper[4816]: W0216 14:32:40.241107 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5eb2031_9005_436a_827c_4c3df3b13d75.slice/crio-97bcd34cf7a622dbe2dc5581136a4a1fe876f92b7599f95d7de7890d6bfddc1a WatchSource:0}: Error finding container 97bcd34cf7a622dbe2dc5581136a4a1fe876f92b7599f95d7de7890d6bfddc1a: Status 404 returned error can't find the container with id 97bcd34cf7a622dbe2dc5581136a4a1fe876f92b7599f95d7de7890d6bfddc1a Feb 16 14:32:40 crc kubenswrapper[4816]: I0216 14:32:40.331334 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-pqcs9"] Feb 16 14:32:40 crc kubenswrapper[4816]: I0216 14:32:40.341191 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" event={"ID":"c5eb2031-9005-436a-827c-4c3df3b13d75","Type":"ContainerStarted","Data":"97bcd34cf7a622dbe2dc5581136a4a1fe876f92b7599f95d7de7890d6bfddc1a"} Feb 16 14:32:40 crc kubenswrapper[4816]: W0216 14:32:40.346099 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode541c102_5476_4214_9555_361d24cef7c9.slice/crio-d42f9d3ad8bc396a8357a32f36e999f9b1129b29dd05beb05b6fab0d992fc48d WatchSource:0}: Error finding container d42f9d3ad8bc396a8357a32f36e999f9b1129b29dd05beb05b6fab0d992fc48d: Status 404 returned error can't find the container with id d42f9d3ad8bc396a8357a32f36e999f9b1129b29dd05beb05b6fab0d992fc48d Feb 16 14:32:41 crc kubenswrapper[4816]: I0216 14:32:41.351452 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqcs9" event={"ID":"e541c102-5476-4214-9555-361d24cef7c9","Type":"ContainerStarted","Data":"1fb1923ff1f0b2a12075a07d9cd9bf7cec29ffabbb7beb655522b9d2b178fed2"} Feb 16 14:32:41 crc kubenswrapper[4816]: I0216 14:32:41.351767 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqcs9" event={"ID":"e541c102-5476-4214-9555-361d24cef7c9","Type":"ContainerStarted","Data":"d42f9d3ad8bc396a8357a32f36e999f9b1129b29dd05beb05b6fab0d992fc48d"} Feb 16 14:32:41 crc kubenswrapper[4816]: I0216 14:32:41.355721 4816 generic.go:334] "Generic (PLEG): container finished" podID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerID="d20e93b4a5955702d768494279d6f07469857f5327bfad0a016b26c0fef3a8c1" exitCode=0 Feb 16 14:32:41 crc kubenswrapper[4816]: I0216 14:32:41.356015 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" event={"ID":"c5eb2031-9005-436a-827c-4c3df3b13d75","Type":"ContainerDied","Data":"d20e93b4a5955702d768494279d6f07469857f5327bfad0a016b26c0fef3a8c1"} Feb 16 14:32:41 crc kubenswrapper[4816]: I0216 14:32:41.368250 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-pqcs9" podStartSLOduration=2.368225649 podStartE2EDuration="2.368225649s" podCreationTimestamp="2026-02-16 14:32:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:41.366122881 +0000 UTC m=+5360.692836609" watchObservedRunningTime="2026-02-16 14:32:41.368225649 +0000 UTC m=+5360.694939377" Feb 16 14:32:42 
crc kubenswrapper[4816]: I0216 14:32:42.378919 4816 generic.go:334] "Generic (PLEG): container finished" podID="e541c102-5476-4214-9555-361d24cef7c9" containerID="1fb1923ff1f0b2a12075a07d9cd9bf7cec29ffabbb7beb655522b9d2b178fed2" exitCode=0 Feb 16 14:32:42 crc kubenswrapper[4816]: I0216 14:32:42.379032 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqcs9" event={"ID":"e541c102-5476-4214-9555-361d24cef7c9","Type":"ContainerDied","Data":"1fb1923ff1f0b2a12075a07d9cd9bf7cec29ffabbb7beb655522b9d2b178fed2"} Feb 16 14:32:42 crc kubenswrapper[4816]: I0216 14:32:42.390209 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" event={"ID":"c5eb2031-9005-436a-827c-4c3df3b13d75","Type":"ContainerStarted","Data":"a0e6b281aba02a20b5b90a05868baaebb124fd63a12b028ab95f350282699cbe"} Feb 16 14:32:42 crc kubenswrapper[4816]: I0216 14:32:42.390338 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:42 crc kubenswrapper[4816]: I0216 14:32:42.421961 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" podStartSLOduration=3.421937829 podStartE2EDuration="3.421937829s" podCreationTimestamp="2026-02-16 14:32:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:42.415926835 +0000 UTC m=+5361.742640583" watchObservedRunningTime="2026-02-16 14:32:42.421937829 +0000 UTC m=+5361.748651557" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.789319 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.870530 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-scripts\") pod \"e541c102-5476-4214-9555-361d24cef7c9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.870576 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sncz\" (UniqueName: \"kubernetes.io/projected/e541c102-5476-4214-9555-361d24cef7c9-kube-api-access-2sncz\") pod \"e541c102-5476-4214-9555-361d24cef7c9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.870705 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-combined-ca-bundle\") pod \"e541c102-5476-4214-9555-361d24cef7c9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.870765 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-config-data\") pod \"e541c102-5476-4214-9555-361d24cef7c9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.870837 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e541c102-5476-4214-9555-361d24cef7c9-logs\") pod \"e541c102-5476-4214-9555-361d24cef7c9\" (UID: \"e541c102-5476-4214-9555-361d24cef7c9\") " Feb 16 14:32:43 
crc kubenswrapper[4816]: I0216 14:32:43.871544 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e541c102-5476-4214-9555-361d24cef7c9-logs" (OuterVolumeSpecName: "logs") pod "e541c102-5476-4214-9555-361d24cef7c9" (UID: "e541c102-5476-4214-9555-361d24cef7c9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.877100 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-scripts" (OuterVolumeSpecName: "scripts") pod "e541c102-5476-4214-9555-361d24cef7c9" (UID: "e541c102-5476-4214-9555-361d24cef7c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.893950 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e541c102-5476-4214-9555-361d24cef7c9-kube-api-access-2sncz" (OuterVolumeSpecName: "kube-api-access-2sncz") pod "e541c102-5476-4214-9555-361d24cef7c9" (UID: "e541c102-5476-4214-9555-361d24cef7c9"). InnerVolumeSpecName "kube-api-access-2sncz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.898279 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e541c102-5476-4214-9555-361d24cef7c9" (UID: "e541c102-5476-4214-9555-361d24cef7c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.899933 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-config-data" (OuterVolumeSpecName: "config-data") pod "e541c102-5476-4214-9555-361d24cef7c9" (UID: "e541c102-5476-4214-9555-361d24cef7c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.972694 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.972727 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.972735 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e541c102-5476-4214-9555-361d24cef7c9-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.972744 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e541c102-5476-4214-9555-361d24cef7c9-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:43 crc kubenswrapper[4816]: I0216 14:32:43.972752 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sncz\" (UniqueName: \"kubernetes.io/projected/e541c102-5476-4214-9555-361d24cef7c9-kube-api-access-2sncz\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.399353 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:32:44 crc kubenswrapper[4816]: E0216 14:32:44.399585 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.411883 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-pqcs9" event={"ID":"e541c102-5476-4214-9555-361d24cef7c9","Type":"ContainerDied","Data":"d42f9d3ad8bc396a8357a32f36e999f9b1129b29dd05beb05b6fab0d992fc48d"} Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.412199 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d42f9d3ad8bc396a8357a32f36e999f9b1129b29dd05beb05b6fab0d992fc48d" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.411978 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-pqcs9" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.456413 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5dc688cbd-sxhbw"] Feb 16 14:32:44 crc kubenswrapper[4816]: E0216 14:32:44.462275 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e541c102-5476-4214-9555-361d24cef7c9" containerName="placement-db-sync" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.462548 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e541c102-5476-4214-9555-361d24cef7c9" containerName="placement-db-sync" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.463011 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e541c102-5476-4214-9555-361d24cef7c9" containerName="placement-db-sync" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.464246 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.469356 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-jqswm" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.470290 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.470548 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.471539 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5dc688cbd-sxhbw"] Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.580910 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9g98\" (UniqueName: \"kubernetes.io/projected/1ebfe76f-1f9d-416f-bc50-190f93955818-kube-api-access-j9g98\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.580974 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ebfe76f-1f9d-416f-bc50-190f93955818-logs\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.581084 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-combined-ca-bundle\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.581103 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-config-data\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.581125 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-scripts\") pod 
\"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.682805 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-combined-ca-bundle\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.682851 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-config-data\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.682876 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-scripts\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.682915 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9g98\" (UniqueName: \"kubernetes.io/projected/1ebfe76f-1f9d-416f-bc50-190f93955818-kube-api-access-j9g98\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.682942 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ebfe76f-1f9d-416f-bc50-190f93955818-logs\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.683404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ebfe76f-1f9d-416f-bc50-190f93955818-logs\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.687889 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-scripts\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.688392 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-config-data\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.691071 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ebfe76f-1f9d-416f-bc50-190f93955818-combined-ca-bundle\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.701601 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9g98\" (UniqueName: \"kubernetes.io/projected/1ebfe76f-1f9d-416f-bc50-190f93955818-kube-api-access-j9g98\") pod \"placement-5dc688cbd-sxhbw\" (UID: \"1ebfe76f-1f9d-416f-bc50-190f93955818\") " pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:44 crc kubenswrapper[4816]: I0216 14:32:44.782992 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:45 crc kubenswrapper[4816]: I0216 14:32:45.207820 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5dc688cbd-sxhbw"] Feb 16 14:32:45 crc kubenswrapper[4816]: W0216 14:32:45.215868 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ebfe76f_1f9d_416f_bc50_190f93955818.slice/crio-36d9484e77357f83a22314949385fddd00988d693c00ee65c4980a44f8cb3c0e WatchSource:0}: Error finding container 36d9484e77357f83a22314949385fddd00988d693c00ee65c4980a44f8cb3c0e: Status 404 returned error can't find the container with id 36d9484e77357f83a22314949385fddd00988d693c00ee65c4980a44f8cb3c0e Feb 16 14:32:45 crc kubenswrapper[4816]: I0216 14:32:45.421815 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5dc688cbd-sxhbw" event={"ID":"1ebfe76f-1f9d-416f-bc50-190f93955818","Type":"ContainerStarted","Data":"b2b968554684afa4d634ad4e05b82d5a26b01bf25b5acb5736c1f5f9ba7146a8"} Feb 16 14:32:45 crc kubenswrapper[4816]: I0216 14:32:45.421863 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5dc688cbd-sxhbw" event={"ID":"1ebfe76f-1f9d-416f-bc50-190f93955818","Type":"ContainerStarted","Data":"36d9484e77357f83a22314949385fddd00988d693c00ee65c4980a44f8cb3c0e"} Feb 16 14:32:46 crc kubenswrapper[4816]: I0216 14:32:46.433534 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5dc688cbd-sxhbw" event={"ID":"1ebfe76f-1f9d-416f-bc50-190f93955818","Type":"ContainerStarted","Data":"0b0b5c0cc98c918db5f9e3428be45638ac1fe9915261339ba18c15e0c784737a"} Feb 16 14:32:46 crc kubenswrapper[4816]: I0216 14:32:46.434998 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:46 crc kubenswrapper[4816]: I0216 14:32:46.435035 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5dc688cbd-sxhbw" Feb 16 14:32:46 crc kubenswrapper[4816]: I0216 14:32:46.462063 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5dc688cbd-sxhbw" podStartSLOduration=2.462031926 podStartE2EDuration="2.462031926s" podCreationTimestamp="2026-02-16 14:32:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:32:46.452214359 +0000 UTC m=+5365.778928087" watchObservedRunningTime="2026-02-16 14:32:46.462031926 +0000 UTC m=+5365.788745654" Feb 16 14:32:49 crc kubenswrapper[4816]: I0216 14:32:49.603899 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:32:49 crc kubenswrapper[4816]: I0216 14:32:49.669134 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d9854bc7c-kglmd"] Feb 16 14:32:49 crc kubenswrapper[4816]: I0216 14:32:49.669401 4816 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" podUID="f3820e46-12e4-4e66-94f8-e44215149080" containerName="dnsmasq-dns" containerID="cri-o://8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf" gracePeriod=10 Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.158341 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.316114 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-dns-svc\") pod \"f3820e46-12e4-4e66-94f8-e44215149080\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.316205 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-sb\") pod \"f3820e46-12e4-4e66-94f8-e44215149080\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.316251 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8t9f\" (UniqueName: \"kubernetes.io/projected/f3820e46-12e4-4e66-94f8-e44215149080-kube-api-access-x8t9f\") pod \"f3820e46-12e4-4e66-94f8-e44215149080\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.316343 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-config\") pod \"f3820e46-12e4-4e66-94f8-e44215149080\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.317073 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-nb\") pod \"f3820e46-12e4-4e66-94f8-e44215149080\" (UID: \"f3820e46-12e4-4e66-94f8-e44215149080\") " Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.324469 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3820e46-12e4-4e66-94f8-e44215149080-kube-api-access-x8t9f" (OuterVolumeSpecName: "kube-api-access-x8t9f") pod "f3820e46-12e4-4e66-94f8-e44215149080" (UID: "f3820e46-12e4-4e66-94f8-e44215149080"). InnerVolumeSpecName "kube-api-access-x8t9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.364626 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f3820e46-12e4-4e66-94f8-e44215149080" (UID: "f3820e46-12e4-4e66-94f8-e44215149080"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.367691 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f3820e46-12e4-4e66-94f8-e44215149080" (UID: "f3820e46-12e4-4e66-94f8-e44215149080"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.370101 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f3820e46-12e4-4e66-94f8-e44215149080" (UID: "f3820e46-12e4-4e66-94f8-e44215149080"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.373646 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-config" (OuterVolumeSpecName: "config") pod "f3820e46-12e4-4e66-94f8-e44215149080" (UID: "f3820e46-12e4-4e66-94f8-e44215149080"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.418871 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.418909 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.418924 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8t9f\" (UniqueName: \"kubernetes.io/projected/f3820e46-12e4-4e66-94f8-e44215149080-kube-api-access-x8t9f\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.418938 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.418949 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f3820e46-12e4-4e66-94f8-e44215149080-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.469432 4816 generic.go:334] "Generic (PLEG): container finished" podID="f3820e46-12e4-4e66-94f8-e44215149080" containerID="8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf" exitCode=0 Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.469481 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" event={"ID":"f3820e46-12e4-4e66-94f8-e44215149080","Type":"ContainerDied","Data":"8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf"} Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.469887 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" event={"ID":"f3820e46-12e4-4e66-94f8-e44215149080","Type":"ContainerDied","Data":"9ffc3c180136b356cf16bb01b57b7fb8db3ea21134e643d2f18cbbab39fe24a0"} Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.469510 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d9854bc7c-kglmd" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.469946 4816 scope.go:117] "RemoveContainer" containerID="8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.497016 4816 scope.go:117] "RemoveContainer" containerID="88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.505845 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d9854bc7c-kglmd"] Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.513473 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d9854bc7c-kglmd"] Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.533667 4816 scope.go:117] "RemoveContainer" containerID="8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf" Feb 16 14:32:50 crc kubenswrapper[4816]: E0216 14:32:50.534958 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf\": container with ID starting with 8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf not found: ID does not exist" containerID="8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.535020 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf"} err="failed to get container status \"8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf\": rpc error: code = NotFound desc = could not find container \"8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf\": container with ID starting with 8e5a340d5098edd6f328cfb2d94759804641dd5f99021e38f52bd36937fb0faf not found: ID does not exist" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.535047 4816 scope.go:117] "RemoveContainer" containerID="88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696" Feb 16 14:32:50 crc kubenswrapper[4816]: E0216 14:32:50.535572 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696\": container with ID starting with 88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696 not found: ID does not exist" containerID="88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696" Feb 16 14:32:50 crc kubenswrapper[4816]: I0216 14:32:50.535597 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696"} err="failed to get container status \"88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696\": rpc error: code = NotFound desc = could not find container \"88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696\": container with ID starting with 88af72630f31c57fe3a29ef9edba064d696f7c7dead5266afe091abfccb66696 not found: ID does not exist" Feb 16 14:32:51 crc kubenswrapper[4816]: I0216 14:32:51.409699 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3820e46-12e4-4e66-94f8-e44215149080" path="/var/lib/kubelet/pods/f3820e46-12e4-4e66-94f8-e44215149080/volumes" Feb 16 14:32:57 crc kubenswrapper[4816]: I0216 14:32:57.399157 4816 
scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:32:57 crc kubenswrapper[4816]: E0216 14:32:57.399954 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.471755 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4rh4c"] Feb 16 14:32:59 crc kubenswrapper[4816]: E0216 14:32:59.472435 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3820e46-12e4-4e66-94f8-e44215149080" containerName="dnsmasq-dns" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.472448 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3820e46-12e4-4e66-94f8-e44215149080" containerName="dnsmasq-dns" Feb 16 14:32:59 crc kubenswrapper[4816]: E0216 14:32:59.472465 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3820e46-12e4-4e66-94f8-e44215149080" containerName="init" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.472472 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3820e46-12e4-4e66-94f8-e44215149080" containerName="init" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.472626 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3820e46-12e4-4e66-94f8-e44215149080" containerName="dnsmasq-dns" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.473835 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.546942 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4rh4c"] Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.641979 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-catalog-content\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.642062 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvb9w\" (UniqueName: \"kubernetes.io/projected/655c46eb-47d6-4b07-a250-f1151099b1cb-kube-api-access-rvb9w\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.642092 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-utilities\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.743360 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-catalog-content\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.743466 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvb9w\" (UniqueName: \"kubernetes.io/projected/655c46eb-47d6-4b07-a250-f1151099b1cb-kube-api-access-rvb9w\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.743506 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-utilities\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.743935 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-utilities\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.743949 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-catalog-content\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.763419 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rvb9w\" (UniqueName: \"kubernetes.io/projected/655c46eb-47d6-4b07-a250-f1151099b1cb-kube-api-access-rvb9w\") pod \"community-operators-4rh4c\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:32:59 crc kubenswrapper[4816]: I0216 14:32:59.842649 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:33:00 crc kubenswrapper[4816]: I0216 14:33:00.419381 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4rh4c"] Feb 16 14:33:00 crc kubenswrapper[4816]: I0216 14:33:00.553622 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rh4c" event={"ID":"655c46eb-47d6-4b07-a250-f1151099b1cb","Type":"ContainerStarted","Data":"35f8e7b30431402ef6fb338500df272f39022679c6b8afb62be168ccbdefaa2d"} Feb 16 14:33:01 crc kubenswrapper[4816]: I0216 14:33:01.563478 4816 generic.go:334] "Generic (PLEG): container finished" podID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerID="709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49" exitCode=0 Feb 16 14:33:01 crc kubenswrapper[4816]: I0216 14:33:01.563535 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rh4c" event={"ID":"655c46eb-47d6-4b07-a250-f1151099b1cb","Type":"ContainerDied","Data":"709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49"} Feb 16 14:33:02 crc kubenswrapper[4816]: I0216 14:33:02.572810 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rh4c" event={"ID":"655c46eb-47d6-4b07-a250-f1151099b1cb","Type":"ContainerStarted","Data":"7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e"} Feb 16 14:33:03 crc kubenswrapper[4816]: I0216 14:33:03.583353 4816 generic.go:334] "Generic (PLEG): container finished" podID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerID="7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e" exitCode=0 Feb 16 14:33:03 crc kubenswrapper[4816]: I0216 14:33:03.583471 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rh4c" event={"ID":"655c46eb-47d6-4b07-a250-f1151099b1cb","Type":"ContainerDied","Data":"7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e"} Feb 16 14:33:04 crc kubenswrapper[4816]: I0216 14:33:04.594618 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rh4c" event={"ID":"655c46eb-47d6-4b07-a250-f1151099b1cb","Type":"ContainerStarted","Data":"d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958"} Feb 16 14:33:04 crc kubenswrapper[4816]: I0216 14:33:04.616918 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4rh4c" podStartSLOduration=3.183944172 podStartE2EDuration="5.616900302s" podCreationTimestamp="2026-02-16 14:32:59 +0000 UTC" firstStartedPulling="2026-02-16 14:33:01.565250646 +0000 UTC m=+5380.891964384" lastFinishedPulling="2026-02-16 14:33:03.998206786 +0000 UTC m=+5383.324920514" observedRunningTime="2026-02-16 14:33:04.613556181 +0000 UTC m=+5383.940269919" watchObservedRunningTime="2026-02-16 14:33:04.616900302 +0000 UTC m=+5383.943614030" Feb 16 14:33:09 crc kubenswrapper[4816]: I0216 14:33:09.843162 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:33:09 crc kubenswrapper[4816]: I0216 14:33:09.843834 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:33:09 crc kubenswrapper[4816]: I0216 14:33:09.888279 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:33:10 crc kubenswrapper[4816]: I0216 14:33:10.687672 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:33:10 crc kubenswrapper[4816]: I0216 14:33:10.734557 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4rh4c"] Feb 16 14:33:11 crc kubenswrapper[4816]: I0216 14:33:11.409378 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:33:11 crc kubenswrapper[4816]: E0216 14:33:11.409575 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:33:12 crc kubenswrapper[4816]: I0216 14:33:12.660927 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4rh4c" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerName="registry-server" containerID="cri-o://d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958" gracePeriod=2 Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.249946 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.412061 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvb9w\" (UniqueName: \"kubernetes.io/projected/655c46eb-47d6-4b07-a250-f1151099b1cb-kube-api-access-rvb9w\") pod \"655c46eb-47d6-4b07-a250-f1151099b1cb\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.412155 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-utilities\") pod \"655c46eb-47d6-4b07-a250-f1151099b1cb\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.412230 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-catalog-content\") pod \"655c46eb-47d6-4b07-a250-f1151099b1cb\" (UID: \"655c46eb-47d6-4b07-a250-f1151099b1cb\") " Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.413918 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-utilities" (OuterVolumeSpecName: "utilities") pod "655c46eb-47d6-4b07-a250-f1151099b1cb" (UID: "655c46eb-47d6-4b07-a250-f1151099b1cb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.418815 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/655c46eb-47d6-4b07-a250-f1151099b1cb-kube-api-access-rvb9w" (OuterVolumeSpecName: "kube-api-access-rvb9w") pod "655c46eb-47d6-4b07-a250-f1151099b1cb" (UID: "655c46eb-47d6-4b07-a250-f1151099b1cb"). InnerVolumeSpecName "kube-api-access-rvb9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.463985 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "655c46eb-47d6-4b07-a250-f1151099b1cb" (UID: "655c46eb-47d6-4b07-a250-f1151099b1cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.514635 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvb9w\" (UniqueName: \"kubernetes.io/projected/655c46eb-47d6-4b07-a250-f1151099b1cb-kube-api-access-rvb9w\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.514691 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.514706 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655c46eb-47d6-4b07-a250-f1151099b1cb-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.670778 4816 generic.go:334] "Generic (PLEG): container finished" podID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerID="d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958" exitCode=0 Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.670821 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4rh4c" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.670841 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rh4c" event={"ID":"655c46eb-47d6-4b07-a250-f1151099b1cb","Type":"ContainerDied","Data":"d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958"} Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.670873 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rh4c" event={"ID":"655c46eb-47d6-4b07-a250-f1151099b1cb","Type":"ContainerDied","Data":"35f8e7b30431402ef6fb338500df272f39022679c6b8afb62be168ccbdefaa2d"} Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.670897 4816 scope.go:117] "RemoveContainer" containerID="d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.691364 4816 scope.go:117] "RemoveContainer" containerID="7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.707450 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4rh4c"] Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.717173 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4rh4c"] Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.733474 4816 scope.go:117] "RemoveContainer" containerID="709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.755696 4816 scope.go:117] "RemoveContainer" containerID="d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958" Feb 16 14:33:13 crc kubenswrapper[4816]: E0216 14:33:13.756669 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958\": container with ID starting with d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958 not found: ID does not exist" containerID="d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.756720 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958"} err="failed to get container status \"d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958\": rpc error: code = NotFound desc = could not find container \"d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958\": container with ID starting with d41dbe80629cfcaacf0cce3e85b389143c5e087dcc9c90b2bafddef0ac01c958 not found: ID does not exist" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.756741 4816 scope.go:117] "RemoveContainer" containerID="7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e" Feb 16 14:33:13 crc kubenswrapper[4816]: E0216 14:33:13.756998 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e\": container with ID starting with 7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e not found: ID does not exist" containerID="7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e" Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.757034 4816 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e"} err="failed to get container status \"7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e\": rpc error: code = NotFound desc = could not find container \"7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e\": container with ID starting with 7d0b29039c417746745334d033c71c980e295f51fdf76d55ad9e0bba507a047e not found: ID does not exist"
Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.757053 4816 scope.go:117] "RemoveContainer" containerID="709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49"
Feb 16 14:33:13 crc kubenswrapper[4816]: E0216 14:33:13.757299 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49\": container with ID starting with 709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49 not found: ID does not exist" containerID="709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49"
Feb 16 14:33:13 crc kubenswrapper[4816]: I0216 14:33:13.757330 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49"} err="failed to get container status \"709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49\": rpc error: code = NotFound desc = could not find container \"709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49\": container with ID starting with 709829f3b787956569f2d5d60a8afccf0a9e760e377682748d72f0ea73cbcf49 not found: ID does not exist"
Feb 16 14:33:15 crc kubenswrapper[4816]: I0216 14:33:15.422367 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" path="/var/lib/kubelet/pods/655c46eb-47d6-4b07-a250-f1151099b1cb/volumes"
Feb 16 14:33:15 crc kubenswrapper[4816]: I0216 14:33:15.890094 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5dc688cbd-sxhbw"
Feb 16 14:33:15 crc kubenswrapper[4816]: I0216 14:33:15.919354 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5dc688cbd-sxhbw"
Feb 16 14:33:26 crc kubenswrapper[4816]: I0216 14:33:26.399737 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"
Feb 16 14:33:26 crc kubenswrapper[4816]: E0216 14:33:26.401003 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:33:27 crc kubenswrapper[4816]: I0216 14:33:27.676835 4816 scope.go:117] "RemoveContainer" containerID="3f6e033f6b916fe2be86734750d3e3adb800d0322bdf69273151346bc631866b"
Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.147704 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-lwjpq"]
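The RemoveContainer / "code = NotFound" pairs above are benign: by the time the deletion path re-queries the runtime, CRI-O has already forgotten the container, and the kubelet logs the NotFound and moves on. A minimal sketch of that idempotent cleanup pattern against a gRPC API; the remove callback is a hypothetical stand-in for a CRI client, and only the status/codes handling mirrors real gRPC usage:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeIfPresent treats NotFound from the runtime as success, so that
    // retried or duplicated cleanups of the same container ID are idempotent.
    func removeIfPresent(remove func(id string) error, id string) error {
        err := remove(id)
        if status.Code(err) == codes.NotFound {
            // Already deleted by an earlier pass; nothing left to do.
            return nil
        }
        return err
    }

    func main() {
        // Simulated runtime that has already forgotten the container.
        gone := func(id string) error {
            return status.Errorf(codes.NotFound, "could not find container %q", id)
        }
        // Prints "cleanup error: <nil>" despite the NotFound from the runtime.
        fmt.Println("cleanup error:", removeIfPresent(gone, "8e5a340d5098..."))
    }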
containerName="extract-content" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.148505 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerName="extract-content" Feb 16 14:33:39 crc kubenswrapper[4816]: E0216 14:33:39.148536 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerName="registry-server" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.148542 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerName="registry-server" Feb 16 14:33:39 crc kubenswrapper[4816]: E0216 14:33:39.148551 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerName="extract-utilities" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.148557 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerName="extract-utilities" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.148772 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="655c46eb-47d6-4b07-a250-f1151099b1cb" containerName="registry-server" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.149372 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.157839 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lwjpq"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.238577 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-zrq7l"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.239602 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.261672 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hxvq\" (UniqueName: \"kubernetes.io/projected/4be63f67-1999-48c3-a4be-1ca62ea68c7d-kube-api-access-4hxvq\") pod \"nova-api-db-create-lwjpq\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.262162 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4be63f67-1999-48c3-a4be-1ca62ea68c7d-operator-scripts\") pod \"nova-api-db-create-lwjpq\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.273435 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-zrq7l"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.349451 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-3a52-account-create-update-7j59j"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.350778 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.353648 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.363764 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hxvq\" (UniqueName: \"kubernetes.io/projected/4be63f67-1999-48c3-a4be-1ca62ea68c7d-kube-api-access-4hxvq\") pod \"nova-api-db-create-lwjpq\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.363867 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9jrg\" (UniqueName: \"kubernetes.io/projected/05901662-9692-4c04-a7df-c902018f3013-kube-api-access-c9jrg\") pod \"nova-cell0-db-create-zrq7l\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.364069 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05901662-9692-4c04-a7df-c902018f3013-operator-scripts\") pod \"nova-cell0-db-create-zrq7l\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.364281 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4be63f67-1999-48c3-a4be-1ca62ea68c7d-operator-scripts\") pod \"nova-api-db-create-lwjpq\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.365026 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4be63f67-1999-48c3-a4be-1ca62ea68c7d-operator-scripts\") pod \"nova-api-db-create-lwjpq\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.370432 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3a52-account-create-update-7j59j"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.397887 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hxvq\" (UniqueName: \"kubernetes.io/projected/4be63f67-1999-48c3-a4be-1ca62ea68c7d-kube-api-access-4hxvq\") pod \"nova-api-db-create-lwjpq\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.448391 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-7tqgn"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.449594 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.460498 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-7tqgn"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.466358 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-operator-scripts\") pod \"nova-api-3a52-account-create-update-7j59j\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.466412 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05901662-9692-4c04-a7df-c902018f3013-operator-scripts\") pod \"nova-cell0-db-create-zrq7l\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.466514 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9jrg\" (UniqueName: \"kubernetes.io/projected/05901662-9692-4c04-a7df-c902018f3013-kube-api-access-c9jrg\") pod \"nova-cell0-db-create-zrq7l\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.466542 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h2nj\" (UniqueName: \"kubernetes.io/projected/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-kube-api-access-4h2nj\") pod \"nova-api-3a52-account-create-update-7j59j\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.467339 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05901662-9692-4c04-a7df-c902018f3013-operator-scripts\") pod \"nova-cell0-db-create-zrq7l\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.479643 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.487441 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9jrg\" (UniqueName: \"kubernetes.io/projected/05901662-9692-4c04-a7df-c902018f3013-kube-api-access-c9jrg\") pod \"nova-cell0-db-create-zrq7l\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.555049 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-7401-account-create-update-r6tsd"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.557967 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.563377 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.564981 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-7401-account-create-update-r6tsd"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.565281 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.569948 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-operator-scripts\") pod \"nova-cell1-db-create-7tqgn\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.570071 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h2nj\" (UniqueName: \"kubernetes.io/projected/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-kube-api-access-4h2nj\") pod \"nova-api-3a52-account-create-update-7j59j\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.570181 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-operator-scripts\") pod \"nova-api-3a52-account-create-update-7j59j\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.570213 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qqcz\" (UniqueName: \"kubernetes.io/projected/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-kube-api-access-6qqcz\") pod \"nova-cell1-db-create-7tqgn\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.572090 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-operator-scripts\") pod \"nova-api-3a52-account-create-update-7j59j\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.598747 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h2nj\" (UniqueName: \"kubernetes.io/projected/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-kube-api-access-4h2nj\") pod \"nova-api-3a52-account-create-update-7j59j\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.668235 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.672083 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qqcz\" (UniqueName: \"kubernetes.io/projected/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-kube-api-access-6qqcz\") pod \"nova-cell1-db-create-7tqgn\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.672119 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9981546-326e-49bb-9f26-c2e9e3ce4482-operator-scripts\") pod \"nova-cell0-7401-account-create-update-r6tsd\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.672224 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qlws\" (UniqueName: \"kubernetes.io/projected/b9981546-326e-49bb-9f26-c2e9e3ce4482-kube-api-access-4qlws\") pod \"nova-cell0-7401-account-create-update-r6tsd\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.672262 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-operator-scripts\") pod \"nova-cell1-db-create-7tqgn\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.673029 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-operator-scripts\") pod \"nova-cell1-db-create-7tqgn\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.692436 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qqcz\" (UniqueName: \"kubernetes.io/projected/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-kube-api-access-6qqcz\") pod \"nova-cell1-db-create-7tqgn\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.760448 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-73d7-account-create-update-2scwn"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.762041 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.764262 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.772546 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-73d7-account-create-update-2scwn"] Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.773825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9981546-326e-49bb-9f26-c2e9e3ce4482-operator-scripts\") pod \"nova-cell0-7401-account-create-update-r6tsd\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.773968 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qlws\" (UniqueName: \"kubernetes.io/projected/b9981546-326e-49bb-9f26-c2e9e3ce4482-kube-api-access-4qlws\") pod \"nova-cell0-7401-account-create-update-r6tsd\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.774746 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9981546-326e-49bb-9f26-c2e9e3ce4482-operator-scripts\") pod \"nova-cell0-7401-account-create-update-r6tsd\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.783823 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.793010 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qlws\" (UniqueName: \"kubernetes.io/projected/b9981546-326e-49bb-9f26-c2e9e3ce4482-kube-api-access-4qlws\") pod \"nova-cell0-7401-account-create-update-r6tsd\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.875809 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqc47\" (UniqueName: \"kubernetes.io/projected/80db2d83-4fd4-4f79-adf2-82058b98081c-kube-api-access-fqc47\") pod \"nova-cell1-73d7-account-create-update-2scwn\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.875885 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80db2d83-4fd4-4f79-adf2-82058b98081c-operator-scripts\") pod \"nova-cell1-73d7-account-create-update-2scwn\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.895643 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.977892 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80db2d83-4fd4-4f79-adf2-82058b98081c-operator-scripts\") pod \"nova-cell1-73d7-account-create-update-2scwn\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.978085 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqc47\" (UniqueName: \"kubernetes.io/projected/80db2d83-4fd4-4f79-adf2-82058b98081c-kube-api-access-fqc47\") pod \"nova-cell1-73d7-account-create-update-2scwn\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.979478 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80db2d83-4fd4-4f79-adf2-82058b98081c-operator-scripts\") pod \"nova-cell1-73d7-account-create-update-2scwn\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:39 crc kubenswrapper[4816]: I0216 14:33:39.999493 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqc47\" (UniqueName: \"kubernetes.io/projected/80db2d83-4fd4-4f79-adf2-82058b98081c-kube-api-access-fqc47\") pod \"nova-cell1-73d7-account-create-update-2scwn\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.013839 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lwjpq"] Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.084024 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.137058 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-zrq7l"] Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.252525 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3a52-account-create-update-7j59j"] Feb 16 14:33:40 crc kubenswrapper[4816]: W0216 14:33:40.263047 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fe07d97_0b42_4b0d_933a_fd9fa23711f2.slice/crio-63db95dececdc81f84bd41289c977178b46db40fd7e7266269dbe40315be3990 WatchSource:0}: Error finding container 63db95dececdc81f84bd41289c977178b46db40fd7e7266269dbe40315be3990: Status 404 returned error can't find the container with id 63db95dececdc81f84bd41289c977178b46db40fd7e7266269dbe40315be3990 Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.316730 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-7tqgn"] Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.443164 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-7401-account-create-update-r6tsd"] Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.629168 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-73d7-account-create-update-2scwn"] Feb 16 14:33:40 crc kubenswrapper[4816]: W0216 14:33:40.656594 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80db2d83_4fd4_4f79_adf2_82058b98081c.slice/crio-3f3b4eab10a8c8408ad778bbb527a9e7b9e1f7cbf5f310f25517f3e4843ffdc9 WatchSource:0}: Error finding container 3f3b4eab10a8c8408ad778bbb527a9e7b9e1f7cbf5f310f25517f3e4843ffdc9: Status 404 returned error can't find the container with id 3f3b4eab10a8c8408ad778bbb527a9e7b9e1f7cbf5f310f25517f3e4843ffdc9 Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.903709 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" event={"ID":"b9981546-326e-49bb-9f26-c2e9e3ce4482","Type":"ContainerStarted","Data":"bee255755502550560bd0f35053ddb28b3411e19d961270f163245789114399b"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.903762 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" event={"ID":"b9981546-326e-49bb-9f26-c2e9e3ce4482","Type":"ContainerStarted","Data":"6476ac0c37c569c0e418d08555c5c2e121b3c8a9fe05f27688d38d9f2ae2e51a"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.905307 4816 generic.go:334] "Generic (PLEG): container finished" podID="4be63f67-1999-48c3-a4be-1ca62ea68c7d" containerID="056343b4ef6faa891f7fa46300aff17a63ef02d5acd482d4766ac3918daa7b14" exitCode=0 Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.905374 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lwjpq" event={"ID":"4be63f67-1999-48c3-a4be-1ca62ea68c7d","Type":"ContainerDied","Data":"056343b4ef6faa891f7fa46300aff17a63ef02d5acd482d4766ac3918daa7b14"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.905396 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lwjpq" 
event={"ID":"4be63f67-1999-48c3-a4be-1ca62ea68c7d","Type":"ContainerStarted","Data":"34604f1c23236d110046a348db4a678c9ffcafb6205f88ab84c615b359ee3be9"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.907099 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" event={"ID":"80db2d83-4fd4-4f79-adf2-82058b98081c","Type":"ContainerStarted","Data":"2025918f770e24de3e670744ef56c08acedc38816c4db660859f1a863bb571c9"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.907131 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" event={"ID":"80db2d83-4fd4-4f79-adf2-82058b98081c","Type":"ContainerStarted","Data":"3f3b4eab10a8c8408ad778bbb527a9e7b9e1f7cbf5f310f25517f3e4843ffdc9"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.908591 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7tqgn" event={"ID":"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b","Type":"ContainerStarted","Data":"fb5b40fc25c161c49d8e03ae846098440af71b01c62f721f9e88f67ea4a423b2"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.908647 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7tqgn" event={"ID":"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b","Type":"ContainerStarted","Data":"e61acac956d885e6dfa15e56adb5caf0b3de64efdc99481510ad357567845c34"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.909807 4816 generic.go:334] "Generic (PLEG): container finished" podID="05901662-9692-4c04-a7df-c902018f3013" containerID="c794b0113a836fee1d955faf272d5fdc94601342b3dda31ee2d32b74eef46567" exitCode=0 Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.909836 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zrq7l" event={"ID":"05901662-9692-4c04-a7df-c902018f3013","Type":"ContainerDied","Data":"c794b0113a836fee1d955faf272d5fdc94601342b3dda31ee2d32b74eef46567"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.909868 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zrq7l" event={"ID":"05901662-9692-4c04-a7df-c902018f3013","Type":"ContainerStarted","Data":"b76acffd09bdc2da0b5f1c93c645e99abe4928646a48ee58f33ba7703b99a112"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.911093 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3a52-account-create-update-7j59j" event={"ID":"0fe07d97-0b42-4b0d-933a-fd9fa23711f2","Type":"ContainerStarted","Data":"793795fa46642e25f4a645c053b1ad6d327c2889a57a94cdada80daf5155f676"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.911140 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3a52-account-create-update-7j59j" event={"ID":"0fe07d97-0b42-4b0d-933a-fd9fa23711f2","Type":"ContainerStarted","Data":"63db95dececdc81f84bd41289c977178b46db40fd7e7266269dbe40315be3990"} Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.930054 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" podStartSLOduration=1.9300129799999999 podStartE2EDuration="1.93001298s" podCreationTimestamp="2026-02-16 14:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:33:40.918091384 +0000 UTC m=+5420.244805132" watchObservedRunningTime="2026-02-16 14:33:40.93001298 +0000 UTC 
m=+5420.256726708" Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.943381 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-7tqgn" podStartSLOduration=1.943360983 podStartE2EDuration="1.943360983s" podCreationTimestamp="2026-02-16 14:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:33:40.933452404 +0000 UTC m=+5420.260166152" watchObservedRunningTime="2026-02-16 14:33:40.943360983 +0000 UTC m=+5420.270074711" Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.954133 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" podStartSLOduration=1.954107657 podStartE2EDuration="1.954107657s" podCreationTimestamp="2026-02-16 14:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:33:40.949858801 +0000 UTC m=+5420.276572539" watchObservedRunningTime="2026-02-16 14:33:40.954107657 +0000 UTC m=+5420.280821385" Feb 16 14:33:40 crc kubenswrapper[4816]: I0216 14:33:40.971094 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-3a52-account-create-update-7j59j" podStartSLOduration=1.971071769 podStartE2EDuration="1.971071769s" podCreationTimestamp="2026-02-16 14:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:33:40.96523567 +0000 UTC m=+5420.291949398" watchObservedRunningTime="2026-02-16 14:33:40.971071769 +0000 UTC m=+5420.297785497" Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.407596 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:33:41 crc kubenswrapper[4816]: E0216 14:33:41.408591 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.922534 4816 generic.go:334] "Generic (PLEG): container finished" podID="80db2d83-4fd4-4f79-adf2-82058b98081c" containerID="2025918f770e24de3e670744ef56c08acedc38816c4db660859f1a863bb571c9" exitCode=0 Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.922633 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" event={"ID":"80db2d83-4fd4-4f79-adf2-82058b98081c","Type":"ContainerDied","Data":"2025918f770e24de3e670744ef56c08acedc38816c4db660859f1a863bb571c9"} Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.924070 4816 generic.go:334] "Generic (PLEG): container finished" podID="0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b" containerID="fb5b40fc25c161c49d8e03ae846098440af71b01c62f721f9e88f67ea4a423b2" exitCode=0 Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.924135 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7tqgn" 
event={"ID":"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b","Type":"ContainerDied","Data":"fb5b40fc25c161c49d8e03ae846098440af71b01c62f721f9e88f67ea4a423b2"} Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.927163 4816 generic.go:334] "Generic (PLEG): container finished" podID="0fe07d97-0b42-4b0d-933a-fd9fa23711f2" containerID="793795fa46642e25f4a645c053b1ad6d327c2889a57a94cdada80daf5155f676" exitCode=0 Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.927243 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3a52-account-create-update-7j59j" event={"ID":"0fe07d97-0b42-4b0d-933a-fd9fa23711f2","Type":"ContainerDied","Data":"793795fa46642e25f4a645c053b1ad6d327c2889a57a94cdada80daf5155f676"} Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.929090 4816 generic.go:334] "Generic (PLEG): container finished" podID="b9981546-326e-49bb-9f26-c2e9e3ce4482" containerID="bee255755502550560bd0f35053ddb28b3411e19d961270f163245789114399b" exitCode=0 Feb 16 14:33:41 crc kubenswrapper[4816]: I0216 14:33:41.929151 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" event={"ID":"b9981546-326e-49bb-9f26-c2e9e3ce4482","Type":"ContainerDied","Data":"bee255755502550560bd0f35053ddb28b3411e19d961270f163245789114399b"} Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.324529 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.333952 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.422596 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hxvq\" (UniqueName: \"kubernetes.io/projected/4be63f67-1999-48c3-a4be-1ca62ea68c7d-kube-api-access-4hxvq\") pod \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.422636 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9jrg\" (UniqueName: \"kubernetes.io/projected/05901662-9692-4c04-a7df-c902018f3013-kube-api-access-c9jrg\") pod \"05901662-9692-4c04-a7df-c902018f3013\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.422811 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4be63f67-1999-48c3-a4be-1ca62ea68c7d-operator-scripts\") pod \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\" (UID: \"4be63f67-1999-48c3-a4be-1ca62ea68c7d\") " Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.422882 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05901662-9692-4c04-a7df-c902018f3013-operator-scripts\") pod \"05901662-9692-4c04-a7df-c902018f3013\" (UID: \"05901662-9692-4c04-a7df-c902018f3013\") " Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.424139 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05901662-9692-4c04-a7df-c902018f3013-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05901662-9692-4c04-a7df-c902018f3013" (UID: "05901662-9692-4c04-a7df-c902018f3013"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.424174 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be63f67-1999-48c3-a4be-1ca62ea68c7d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4be63f67-1999-48c3-a4be-1ca62ea68c7d" (UID: "4be63f67-1999-48c3-a4be-1ca62ea68c7d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.428548 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05901662-9692-4c04-a7df-c902018f3013-kube-api-access-c9jrg" (OuterVolumeSpecName: "kube-api-access-c9jrg") pod "05901662-9692-4c04-a7df-c902018f3013" (UID: "05901662-9692-4c04-a7df-c902018f3013"). InnerVolumeSpecName "kube-api-access-c9jrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.429212 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4be63f67-1999-48c3-a4be-1ca62ea68c7d-kube-api-access-4hxvq" (OuterVolumeSpecName: "kube-api-access-4hxvq") pod "4be63f67-1999-48c3-a4be-1ca62ea68c7d" (UID: "4be63f67-1999-48c3-a4be-1ca62ea68c7d"). InnerVolumeSpecName "kube-api-access-4hxvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.525317 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05901662-9692-4c04-a7df-c902018f3013-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.525374 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hxvq\" (UniqueName: \"kubernetes.io/projected/4be63f67-1999-48c3-a4be-1ca62ea68c7d-kube-api-access-4hxvq\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.525385 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9jrg\" (UniqueName: \"kubernetes.io/projected/05901662-9692-4c04-a7df-c902018f3013-kube-api-access-c9jrg\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.525394 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4be63f67-1999-48c3-a4be-1ca62ea68c7d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.941542 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-zrq7l" event={"ID":"05901662-9692-4c04-a7df-c902018f3013","Type":"ContainerDied","Data":"b76acffd09bdc2da0b5f1c93c645e99abe4928646a48ee58f33ba7703b99a112"} Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.941600 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b76acffd09bdc2da0b5f1c93c645e99abe4928646a48ee58f33ba7703b99a112" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.941634 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-zrq7l" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.943930 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lwjpq" event={"ID":"4be63f67-1999-48c3-a4be-1ca62ea68c7d","Type":"ContainerDied","Data":"34604f1c23236d110046a348db4a678c9ffcafb6205f88ab84c615b359ee3be9"} Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.943976 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34604f1c23236d110046a348db4a678c9ffcafb6205f88ab84c615b359ee3be9" Feb 16 14:33:42 crc kubenswrapper[4816]: I0216 14:33:42.944050 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lwjpq" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.312400 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.421939 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.427922 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.437837 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.441079 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80db2d83-4fd4-4f79-adf2-82058b98081c-operator-scripts\") pod \"80db2d83-4fd4-4f79-adf2-82058b98081c\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.441354 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqc47\" (UniqueName: \"kubernetes.io/projected/80db2d83-4fd4-4f79-adf2-82058b98081c-kube-api-access-fqc47\") pod \"80db2d83-4fd4-4f79-adf2-82058b98081c\" (UID: \"80db2d83-4fd4-4f79-adf2-82058b98081c\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.443036 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80db2d83-4fd4-4f79-adf2-82058b98081c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "80db2d83-4fd4-4f79-adf2-82058b98081c" (UID: "80db2d83-4fd4-4f79-adf2-82058b98081c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.446236 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80db2d83-4fd4-4f79-adf2-82058b98081c-kube-api-access-fqc47" (OuterVolumeSpecName: "kube-api-access-fqc47") pod "80db2d83-4fd4-4f79-adf2-82058b98081c" (UID: "80db2d83-4fd4-4f79-adf2-82058b98081c"). InnerVolumeSpecName "kube-api-access-fqc47". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543218 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qlws\" (UniqueName: \"kubernetes.io/projected/b9981546-326e-49bb-9f26-c2e9e3ce4482-kube-api-access-4qlws\") pod \"b9981546-326e-49bb-9f26-c2e9e3ce4482\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543309 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-operator-scripts\") pod \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543338 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qqcz\" (UniqueName: \"kubernetes.io/projected/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-kube-api-access-6qqcz\") pod \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543406 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h2nj\" (UniqueName: \"kubernetes.io/projected/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-kube-api-access-4h2nj\") pod \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\" (UID: \"0fe07d97-0b42-4b0d-933a-fd9fa23711f2\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543437 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9981546-326e-49bb-9f26-c2e9e3ce4482-operator-scripts\") pod \"b9981546-326e-49bb-9f26-c2e9e3ce4482\" (UID: \"b9981546-326e-49bb-9f26-c2e9e3ce4482\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543479 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-operator-scripts\") pod \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\" (UID: \"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b\") " Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543952 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqc47\" (UniqueName: \"kubernetes.io/projected/80db2d83-4fd4-4f79-adf2-82058b98081c-kube-api-access-fqc47\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.543974 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80db2d83-4fd4-4f79-adf2-82058b98081c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.544153 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9981546-326e-49bb-9f26-c2e9e3ce4482-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b9981546-326e-49bb-9f26-c2e9e3ce4482" (UID: "b9981546-326e-49bb-9f26-c2e9e3ce4482"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.544189 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0fe07d97-0b42-4b0d-933a-fd9fa23711f2" (UID: "0fe07d97-0b42-4b0d-933a-fd9fa23711f2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.544542 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b" (UID: "0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.546861 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9981546-326e-49bb-9f26-c2e9e3ce4482-kube-api-access-4qlws" (OuterVolumeSpecName: "kube-api-access-4qlws") pod "b9981546-326e-49bb-9f26-c2e9e3ce4482" (UID: "b9981546-326e-49bb-9f26-c2e9e3ce4482"). InnerVolumeSpecName "kube-api-access-4qlws". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.546942 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-kube-api-access-6qqcz" (OuterVolumeSpecName: "kube-api-access-6qqcz") pod "0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b" (UID: "0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b"). InnerVolumeSpecName "kube-api-access-6qqcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.546993 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-kube-api-access-4h2nj" (OuterVolumeSpecName: "kube-api-access-4h2nj") pod "0fe07d97-0b42-4b0d-933a-fd9fa23711f2" (UID: "0fe07d97-0b42-4b0d-933a-fd9fa23711f2"). InnerVolumeSpecName "kube-api-access-4h2nj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.645194 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.645416 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qqcz\" (UniqueName: \"kubernetes.io/projected/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-kube-api-access-6qqcz\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.645511 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h2nj\" (UniqueName: \"kubernetes.io/projected/0fe07d97-0b42-4b0d-933a-fd9fa23711f2-kube-api-access-4h2nj\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.645577 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9981546-326e-49bb-9f26-c2e9e3ce4482-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.645635 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.645749 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qlws\" (UniqueName: \"kubernetes.io/projected/b9981546-326e-49bb-9f26-c2e9e3ce4482-kube-api-access-4qlws\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.954807 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" event={"ID":"80db2d83-4fd4-4f79-adf2-82058b98081c","Type":"ContainerDied","Data":"3f3b4eab10a8c8408ad778bbb527a9e7b9e1f7cbf5f310f25517f3e4843ffdc9"} Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.954858 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f3b4eab10a8c8408ad778bbb527a9e7b9e1f7cbf5f310f25517f3e4843ffdc9" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.954959 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-73d7-account-create-update-2scwn" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.958888 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7tqgn" event={"ID":"0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b","Type":"ContainerDied","Data":"e61acac956d885e6dfa15e56adb5caf0b3de64efdc99481510ad357567845c34"} Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.958932 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e61acac956d885e6dfa15e56adb5caf0b3de64efdc99481510ad357567845c34" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.959016 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-7tqgn" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.962100 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3a52-account-create-update-7j59j" event={"ID":"0fe07d97-0b42-4b0d-933a-fd9fa23711f2","Type":"ContainerDied","Data":"63db95dececdc81f84bd41289c977178b46db40fd7e7266269dbe40315be3990"} Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.962295 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63db95dececdc81f84bd41289c977178b46db40fd7e7266269dbe40315be3990" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.962417 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3a52-account-create-update-7j59j" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.967492 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" event={"ID":"b9981546-326e-49bb-9f26-c2e9e3ce4482","Type":"ContainerDied","Data":"6476ac0c37c569c0e418d08555c5c2e121b3c8a9fe05f27688d38d9f2ae2e51a"} Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.967564 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6476ac0c37c569c0e418d08555c5c2e121b3c8a9fe05f27688d38d9f2ae2e51a" Feb 16 14:33:43 crc kubenswrapper[4816]: I0216 14:33:43.967647 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-7401-account-create-update-r6tsd" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.751219 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sgf4b"] Feb 16 14:33:44 crc kubenswrapper[4816]: E0216 14:33:44.751950 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fe07d97-0b42-4b0d-933a-fd9fa23711f2" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.751964 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fe07d97-0b42-4b0d-933a-fd9fa23711f2" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: E0216 14:33:44.751980 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.751987 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: E0216 14:33:44.752006 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be63f67-1999-48c3-a4be-1ca62ea68c7d" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752012 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be63f67-1999-48c3-a4be-1ca62ea68c7d" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: E0216 14:33:44.752024 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80db2d83-4fd4-4f79-adf2-82058b98081c" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752031 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="80db2d83-4fd4-4f79-adf2-82058b98081c" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: E0216 14:33:44.752046 4816 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b9981546-326e-49bb-9f26-c2e9e3ce4482" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752054 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9981546-326e-49bb-9f26-c2e9e3ce4482" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: E0216 14:33:44.752069 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05901662-9692-4c04-a7df-c902018f3013" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752075 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="05901662-9692-4c04-a7df-c902018f3013" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752290 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4be63f67-1999-48c3-a4be-1ca62ea68c7d" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752312 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752325 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="80db2d83-4fd4-4f79-adf2-82058b98081c" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752338 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9981546-326e-49bb-9f26-c2e9e3ce4482" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752348 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="05901662-9692-4c04-a7df-c902018f3013" containerName="mariadb-database-create" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.752361 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fe07d97-0b42-4b0d-933a-fd9fa23711f2" containerName="mariadb-account-create-update" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.753116 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.755879 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-dzxmw" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.756203 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.756367 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.764879 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sgf4b"] Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.868459 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w99kl\" (UniqueName: \"kubernetes.io/projected/f52dc84a-938b-4013-b9d1-fbcdb6360261-kube-api-access-w99kl\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.868887 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-config-data\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.869066 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-scripts\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.869159 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.971004 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-config-data\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.971133 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-scripts\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.971183 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: 
\"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.971254 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w99kl\" (UniqueName: \"kubernetes.io/projected/f52dc84a-938b-4013-b9d1-fbcdb6360261-kube-api-access-w99kl\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.980169 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-scripts\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.980577 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-config-data\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.984335 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:44 crc kubenswrapper[4816]: I0216 14:33:44.990309 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w99kl\" (UniqueName: \"kubernetes.io/projected/f52dc84a-938b-4013-b9d1-fbcdb6360261-kube-api-access-w99kl\") pod \"nova-cell0-conductor-db-sync-sgf4b\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:45 crc kubenswrapper[4816]: I0216 14:33:45.069904 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:45 crc kubenswrapper[4816]: I0216 14:33:45.530446 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sgf4b"] Feb 16 14:33:45 crc kubenswrapper[4816]: W0216 14:33:45.534545 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf52dc84a_938b_4013_b9d1_fbcdb6360261.slice/crio-e98b4f3ffd01fae0dff3e15fbf4f3742494f5ae088bcb17c804806a4ea81e6ca WatchSource:0}: Error finding container e98b4f3ffd01fae0dff3e15fbf4f3742494f5ae088bcb17c804806a4ea81e6ca: Status 404 returned error can't find the container with id e98b4f3ffd01fae0dff3e15fbf4f3742494f5ae088bcb17c804806a4ea81e6ca Feb 16 14:33:45 crc kubenswrapper[4816]: I0216 14:33:45.995239 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" event={"ID":"f52dc84a-938b-4013-b9d1-fbcdb6360261","Type":"ContainerStarted","Data":"edf7fb2c209872600a4b2481a03e7911cd3d8f0f884205f502226613dd89d639"} Feb 16 14:33:45 crc kubenswrapper[4816]: I0216 14:33:45.995289 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" event={"ID":"f52dc84a-938b-4013-b9d1-fbcdb6360261","Type":"ContainerStarted","Data":"e98b4f3ffd01fae0dff3e15fbf4f3742494f5ae088bcb17c804806a4ea81e6ca"} Feb 16 14:33:46 crc kubenswrapper[4816]: I0216 14:33:46.032430 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" podStartSLOduration=2.032406532 podStartE2EDuration="2.032406532s" podCreationTimestamp="2026-02-16 14:33:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:33:46.020319792 +0000 UTC m=+5425.347033570" watchObservedRunningTime="2026-02-16 14:33:46.032406532 +0000 UTC m=+5425.359120260" Feb 16 14:33:52 crc kubenswrapper[4816]: I0216 14:33:52.052471 4816 generic.go:334] "Generic (PLEG): container finished" podID="f52dc84a-938b-4013-b9d1-fbcdb6360261" containerID="edf7fb2c209872600a4b2481a03e7911cd3d8f0f884205f502226613dd89d639" exitCode=0 Feb 16 14:33:52 crc kubenswrapper[4816]: I0216 14:33:52.053128 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" event={"ID":"f52dc84a-938b-4013-b9d1-fbcdb6360261","Type":"ContainerDied","Data":"edf7fb2c209872600a4b2481a03e7911cd3d8f0f884205f502226613dd89d639"} Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.418561 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.517627 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w99kl\" (UniqueName: \"kubernetes.io/projected/f52dc84a-938b-4013-b9d1-fbcdb6360261-kube-api-access-w99kl\") pod \"f52dc84a-938b-4013-b9d1-fbcdb6360261\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.517736 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-scripts\") pod \"f52dc84a-938b-4013-b9d1-fbcdb6360261\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.517788 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-combined-ca-bundle\") pod \"f52dc84a-938b-4013-b9d1-fbcdb6360261\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.517846 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-config-data\") pod \"f52dc84a-938b-4013-b9d1-fbcdb6360261\" (UID: \"f52dc84a-938b-4013-b9d1-fbcdb6360261\") " Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.524215 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f52dc84a-938b-4013-b9d1-fbcdb6360261-kube-api-access-w99kl" (OuterVolumeSpecName: "kube-api-access-w99kl") pod "f52dc84a-938b-4013-b9d1-fbcdb6360261" (UID: "f52dc84a-938b-4013-b9d1-fbcdb6360261"). InnerVolumeSpecName "kube-api-access-w99kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.524736 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-scripts" (OuterVolumeSpecName: "scripts") pod "f52dc84a-938b-4013-b9d1-fbcdb6360261" (UID: "f52dc84a-938b-4013-b9d1-fbcdb6360261"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.542443 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-config-data" (OuterVolumeSpecName: "config-data") pod "f52dc84a-938b-4013-b9d1-fbcdb6360261" (UID: "f52dc84a-938b-4013-b9d1-fbcdb6360261"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.546258 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f52dc84a-938b-4013-b9d1-fbcdb6360261" (UID: "f52dc84a-938b-4013-b9d1-fbcdb6360261"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.620080 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w99kl\" (UniqueName: \"kubernetes.io/projected/f52dc84a-938b-4013-b9d1-fbcdb6360261-kube-api-access-w99kl\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.620119 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.620132 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:53 crc kubenswrapper[4816]: I0216 14:33:53.620145 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52dc84a-938b-4013-b9d1-fbcdb6360261-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.069793 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" event={"ID":"f52dc84a-938b-4013-b9d1-fbcdb6360261","Type":"ContainerDied","Data":"e98b4f3ffd01fae0dff3e15fbf4f3742494f5ae088bcb17c804806a4ea81e6ca"} Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.069829 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e98b4f3ffd01fae0dff3e15fbf4f3742494f5ae088bcb17c804806a4ea81e6ca" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.069900 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-sgf4b" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.161216 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:33:54 crc kubenswrapper[4816]: E0216 14:33:54.161730 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f52dc84a-938b-4013-b9d1-fbcdb6360261" containerName="nova-cell0-conductor-db-sync" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.161759 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f52dc84a-938b-4013-b9d1-fbcdb6360261" containerName="nova-cell0-conductor-db-sync" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.161953 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f52dc84a-938b-4013-b9d1-fbcdb6360261" containerName="nova-cell0-conductor-db-sync" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.162718 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.165202 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.165722 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-dzxmw" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.166737 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.229249 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.229349 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv8pq\" (UniqueName: \"kubernetes.io/projected/7366b149-cbc0-40c5-a9aa-753f21d7c971-kube-api-access-vv8pq\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.229437 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.330849 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv8pq\" (UniqueName: \"kubernetes.io/projected/7366b149-cbc0-40c5-a9aa-753f21d7c971-kube-api-access-vv8pq\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.330916 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.331038 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.336562 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.343703 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.351860 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv8pq\" (UniqueName: \"kubernetes.io/projected/7366b149-cbc0-40c5-a9aa-753f21d7c971-kube-api-access-vv8pq\") pod \"nova-cell0-conductor-0\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.491272 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:54 crc kubenswrapper[4816]: I0216 14:33:54.912976 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:33:55 crc kubenswrapper[4816]: I0216 14:33:55.078851 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7366b149-cbc0-40c5-a9aa-753f21d7c971","Type":"ContainerStarted","Data":"339b1c7edb36252ad9dec7e9db79707e345d7badab3b696c046f2351f0a67cf6"} Feb 16 14:33:55 crc kubenswrapper[4816]: I0216 14:33:55.079220 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 16 14:33:55 crc kubenswrapper[4816]: I0216 14:33:55.079233 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7366b149-cbc0-40c5-a9aa-753f21d7c971","Type":"ContainerStarted","Data":"6e70c03c28c88c2996b5642e8f4594559170c0668ce53c4872a59140d3196a00"} Feb 16 14:33:55 crc kubenswrapper[4816]: I0216 14:33:55.096841 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.096821181 podStartE2EDuration="1.096821181s" podCreationTimestamp="2026-02-16 14:33:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:33:55.092724949 +0000 UTC m=+5434.419438687" watchObservedRunningTime="2026-02-16 14:33:55.096821181 +0000 UTC m=+5434.423534899" Feb 16 14:33:56 crc kubenswrapper[4816]: I0216 14:33:56.400478 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:33:56 crc kubenswrapper[4816]: E0216 14:33:56.401070 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:34:04 crc kubenswrapper[4816]: I0216 14:34:04.517447 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 16 14:34:04 crc kubenswrapper[4816]: I0216 14:34:04.957131 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-tqhhr"] Feb 16 14:34:04 crc kubenswrapper[4816]: I0216 14:34:04.958387 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:04 crc kubenswrapper[4816]: I0216 14:34:04.962694 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Feb 16 14:34:04 crc kubenswrapper[4816]: I0216 14:34:04.962741 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Feb 16 14:34:04 crc kubenswrapper[4816]: I0216 14:34:04.973016 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-tqhhr"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.039166 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75pzr\" (UniqueName: \"kubernetes.io/projected/2158e136-e15e-4388-aaba-4a3a6d936dbe-kube-api-access-75pzr\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.039235 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-config-data\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.039564 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-scripts\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.039679 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.141958 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-scripts\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.142029 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.142128 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75pzr\" (UniqueName: \"kubernetes.io/projected/2158e136-e15e-4388-aaba-4a3a6d936dbe-kube-api-access-75pzr\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.142175 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-config-data\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.147419 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-scripts\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.151250 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-config-data\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.156462 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.172031 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.172533 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75pzr\" (UniqueName: \"kubernetes.io/projected/2158e136-e15e-4388-aaba-4a3a6d936dbe-kube-api-access-75pzr\") pod \"nova-cell0-cell-mapping-tqhhr\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.174294 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.183908 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.184040 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.185647 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.190310 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.239484 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.245311 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-config-data\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.245372 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-logs\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.245419 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.245456 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbbpf\" (UniqueName: \"kubernetes.io/projected/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-kube-api-access-vbbpf\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.245489 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.245507 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qw7t\" (UniqueName: \"kubernetes.io/projected/efedca8a-1bb7-4b68-8368-2c69fba8c489-kube-api-access-7qw7t\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.245530 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.255236 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.278885 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.318160 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.320059 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.329160 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.370111 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.414474 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.424141 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.424361 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbbpf\" (UniqueName: \"kubernetes.io/projected/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-kube-api-access-vbbpf\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.424440 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.424469 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qw7t\" (UniqueName: \"kubernetes.io/projected/efedca8a-1bb7-4b68-8368-2c69fba8c489-kube-api-access-7qw7t\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.424516 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.430576 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-config-data\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.430671 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-logs\") pod 
\"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.445552 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-logs\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.486327 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-config-data\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.492985 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.494713 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbbpf\" (UniqueName: \"kubernetes.io/projected/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-kube-api-access-vbbpf\") pod \"nova-api-0\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") " pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.495059 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.498774 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qw7t\" (UniqueName: \"kubernetes.io/projected/efedca8a-1bb7-4b68-8368-2c69fba8c489-kube-api-access-7qw7t\") pod \"nova-cell1-novncproxy-0\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.516052 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.517916 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.524109 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.525124 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.536808 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.537309 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24lcj\" (UniqueName: \"kubernetes.io/projected/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-kube-api-access-24lcj\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.537370 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-config-data\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.542543 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54fbc4c87-qldfx"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.544536 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.558356 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54fbc4c87-qldfx"] Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.572646 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.584562 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641180 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-config\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641261 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-logs\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641297 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-sb\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641332 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rndj2\" (UniqueName: \"kubernetes.io/projected/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-kube-api-access-rndj2\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641351 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-nb\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641407 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641428 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24lcj\" (UniqueName: \"kubernetes.io/projected/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-kube-api-access-24lcj\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641464 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-config-data\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641482 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-config-data\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 
14:34:05.641510 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcwrk\" (UniqueName: \"kubernetes.io/projected/ba96c5a1-a23d-4616-b648-2644b8de5a32-kube-api-access-fcwrk\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641541 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-dns-svc\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.641566 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.647968 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.649638 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-config-data\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.662752 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24lcj\" (UniqueName: \"kubernetes.io/projected/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-kube-api-access-24lcj\") pod \"nova-scheduler-0\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.704043 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.743438 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-config\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.743539 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-logs\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.743565 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-sb\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.743592 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rndj2\" (UniqueName: \"kubernetes.io/projected/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-kube-api-access-rndj2\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.743607 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-nb\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.743711 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-config-data\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.744969 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-sb\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.745286 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-config\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.745677 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-logs\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.746015 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-nb\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.746069 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcwrk\" (UniqueName: \"kubernetes.io/projected/ba96c5a1-a23d-4616-b648-2644b8de5a32-kube-api-access-fcwrk\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.746109 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-dns-svc\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.746144 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.748105 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-dns-svc\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.751729 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.752095 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-config-data\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.775314 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcwrk\" (UniqueName: \"kubernetes.io/projected/ba96c5a1-a23d-4616-b648-2644b8de5a32-kube-api-access-fcwrk\") pod \"dnsmasq-dns-54fbc4c87-qldfx\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") " pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.792326 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rndj2\" (UniqueName: \"kubernetes.io/projected/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-kube-api-access-rndj2\") pod \"nova-metadata-0\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") " pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.868545 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.877682 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:05 crc kubenswrapper[4816]: I0216 14:34:05.891727 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:34:05 crc kubenswrapper[4816]: W0216 14:34:05.903447 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefedca8a_1bb7_4b68_8368_2c69fba8c489.slice/crio-2503654d1dbd6c1041b6b58cec9672cbfb7c7516a7c93592a129993395e6da9e WatchSource:0}: Error finding container 2503654d1dbd6c1041b6b58cec9672cbfb7c7516a7c93592a129993395e6da9e: Status 404 returned error can't find the container with id 2503654d1dbd6c1041b6b58cec9672cbfb7c7516a7c93592a129993395e6da9e Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:05.970098 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-tqhhr"] Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.183590 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.218480 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tqhhr" event={"ID":"2158e136-e15e-4388-aaba-4a3a6d936dbe","Type":"ContainerStarted","Data":"747406bc6426280cd23d4164d217c1dab6fac3936b7f9f2c4a9c4060214bce9f"} Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.221813 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"efedca8a-1bb7-4b68-8368-2c69fba8c489","Type":"ContainerStarted","Data":"2503654d1dbd6c1041b6b58cec9672cbfb7c7516a7c93592a129993395e6da9e"} Feb 16 14:34:06 crc kubenswrapper[4816]: W0216 14:34:06.252675 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3cea6032_c8ee_4e5f_8d99_11ddf4af40eb.slice/crio-61a34f65a8461de77189002d281c2c15c8cac2ca68d8c1ae98ace26c058f642c WatchSource:0}: Error finding container 61a34f65a8461de77189002d281c2c15c8cac2ca68d8c1ae98ace26c058f642c: Status 404 returned error can't find the container with id 61a34f65a8461de77189002d281c2c15c8cac2ca68d8c1ae98ace26c058f642c Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.359262 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8cdtd"] Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.362900 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.379078 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8cdtd"] Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.383402 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.383834 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 16 14:34:06 crc kubenswrapper[4816]: W0216 14:34:06.389767 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2941642d_5bb2_4ea5_b3b8_a0dc0db695c9.slice/crio-4227738665f6ec83ffa495f4f9741004efa1ee755e5b52bea0305d78d458093c WatchSource:0}: Error finding container 4227738665f6ec83ffa495f4f9741004efa1ee755e5b52bea0305d78d458093c: Status 404 returned error can't find the container with id 4227738665f6ec83ffa495f4f9741004efa1ee755e5b52bea0305d78d458093c Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.390937 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.478292 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-config-data\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.478336 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.478365 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzm95\" (UniqueName: \"kubernetes.io/projected/7eddabf2-6f16-4466-a536-79daaa13fef8-kube-api-access-xzm95\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.478461 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-scripts\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.579852 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-scripts\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.579980 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-config-data\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.580006 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.580028 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzm95\" (UniqueName: \"kubernetes.io/projected/7eddabf2-6f16-4466-a536-79daaa13fef8-kube-api-access-xzm95\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.590216 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.590541 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-scripts\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.591198 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-config-data\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.599968 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzm95\" (UniqueName: \"kubernetes.io/projected/7eddabf2-6f16-4466-a536-79daaa13fef8-kube-api-access-xzm95\") pod \"nova-cell1-conductor-db-sync-8cdtd\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:06 crc kubenswrapper[4816]: I0216 14:34:06.771683 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.214972 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54fbc4c87-qldfx"] Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.221202 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.251410 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" event={"ID":"ba96c5a1-a23d-4616-b648-2644b8de5a32","Type":"ContainerStarted","Data":"c205b2d064908847ef44f24fc5d39456934a0b1b9686881ea5c734f596e2ff65"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.253371 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9","Type":"ContainerStarted","Data":"5a9a4a7a46b1b068cd0b7663ef5953e5b9cf57e616eaf2c86988e4a4cd0ca156"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.253426 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9","Type":"ContainerStarted","Data":"4227738665f6ec83ffa495f4f9741004efa1ee755e5b52bea0305d78d458093c"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.257170 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tqhhr" event={"ID":"2158e136-e15e-4388-aaba-4a3a6d936dbe","Type":"ContainerStarted","Data":"76732a0ced934cf01b3e6505bf23a6da2a741abcc55920621e0db4aed7d34868"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.277547 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.277528899 podStartE2EDuration="2.277528899s" podCreationTimestamp="2026-02-16 14:34:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:07.271992658 +0000 UTC m=+5446.598706386" watchObservedRunningTime="2026-02-16 14:34:07.277528899 +0000 UTC m=+5446.604242627" Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.279526 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb","Type":"ContainerStarted","Data":"3776b1d38677c925e6e61fe12d702f12fc6f62009507eee6faea2a47dcd5b4e6"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.279558 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb","Type":"ContainerStarted","Data":"eadce4ba8279050680e32c0d2b52e0fd186561d56dc5eaada4edbb0650f304ac"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.279568 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb","Type":"ContainerStarted","Data":"61a34f65a8461de77189002d281c2c15c8cac2ca68d8c1ae98ace26c058f642c"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.291972 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"efedca8a-1bb7-4b68-8368-2c69fba8c489","Type":"ContainerStarted","Data":"b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d"} Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.295002 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell0-cell-mapping-tqhhr" podStartSLOduration=3.294976835 podStartE2EDuration="3.294976835s" podCreationTimestamp="2026-02-16 14:34:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:07.292311762 +0000 UTC m=+5446.619025490" watchObservedRunningTime="2026-02-16 14:34:07.294976835 +0000 UTC m=+5446.621690563" Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.356585 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.356553195 podStartE2EDuration="2.356553195s" podCreationTimestamp="2026-02-16 14:34:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:07.313543742 +0000 UTC m=+5446.640257470" watchObservedRunningTime="2026-02-16 14:34:07.356553195 +0000 UTC m=+5446.683266923" Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.367417 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8cdtd"] Feb 16 14:34:07 crc kubenswrapper[4816]: I0216 14:34:07.369452 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.369437896 podStartE2EDuration="2.369437896s" podCreationTimestamp="2026-02-16 14:34:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:07.348508545 +0000 UTC m=+5446.675222273" watchObservedRunningTime="2026-02-16 14:34:07.369437896 +0000 UTC m=+5446.696151634" Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.322828 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c7c7b2b7-55bf-4359-8a5d-b71a0239e746","Type":"ContainerStarted","Data":"9d5127ebbf660448ed181aa24c36ef7956db25911330ed6981643b4794aff16f"} Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.324272 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c7c7b2b7-55bf-4359-8a5d-b71a0239e746","Type":"ContainerStarted","Data":"0d1b48a04a041b04a08f758c2e80f2a35f10d5d4182e545e3f5bc71828d0b366"} Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.324350 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c7c7b2b7-55bf-4359-8a5d-b71a0239e746","Type":"ContainerStarted","Data":"9f7416deca440c2c82704103b314bc10300ca813bd0d4f6ce8d5081157e321d0"} Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.325677 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" event={"ID":"7eddabf2-6f16-4466-a536-79daaa13fef8","Type":"ContainerStarted","Data":"b58d4c0e79eaf9cf535ba14e2aadc38a7679ed768c9b43c41bef2ae071180560"} Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.325737 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" event={"ID":"7eddabf2-6f16-4466-a536-79daaa13fef8","Type":"ContainerStarted","Data":"95a0bbe2390847b92322234accb8647f52ca143a6eabb4bae0e43f8816e1ea84"} Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.328667 4816 generic.go:334] "Generic (PLEG): container finished" podID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerID="f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5" exitCode=0 Feb 16 14:34:08 crc 
kubenswrapper[4816]: I0216 14:34:08.328767 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" event={"ID":"ba96c5a1-a23d-4616-b648-2644b8de5a32","Type":"ContainerDied","Data":"f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5"} Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.360928 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.360902559 podStartE2EDuration="3.360902559s" podCreationTimestamp="2026-02-16 14:34:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:08.350103235 +0000 UTC m=+5447.676816973" watchObservedRunningTime="2026-02-16 14:34:08.360902559 +0000 UTC m=+5447.687616287" Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.398531 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:34:08 crc kubenswrapper[4816]: E0216 14:34:08.398769 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:34:08 crc kubenswrapper[4816]: I0216 14:34:08.406582 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" podStartSLOduration=2.406563975 podStartE2EDuration="2.406563975s" podCreationTimestamp="2026-02-16 14:34:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:08.395693768 +0000 UTC m=+5447.722407506" watchObservedRunningTime="2026-02-16 14:34:08.406563975 +0000 UTC m=+5447.733277703" Feb 16 14:34:09 crc kubenswrapper[4816]: I0216 14:34:09.338162 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" event={"ID":"ba96c5a1-a23d-4616-b648-2644b8de5a32","Type":"ContainerStarted","Data":"5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c"} Feb 16 14:34:09 crc kubenswrapper[4816]: I0216 14:34:09.359264 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" podStartSLOduration=4.3592429 podStartE2EDuration="4.3592429s" podCreationTimestamp="2026-02-16 14:34:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:09.353535024 +0000 UTC m=+5448.680248762" watchObservedRunningTime="2026-02-16 14:34:09.3592429 +0000 UTC m=+5448.685956628" Feb 16 14:34:10 crc kubenswrapper[4816]: I0216 14:34:10.346528 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:10 crc kubenswrapper[4816]: I0216 14:34:10.585132 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:10 crc kubenswrapper[4816]: I0216 14:34:10.705163 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 16 14:34:10 crc kubenswrapper[4816]: I0216 14:34:10.868714 4816 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 14:34:10 crc kubenswrapper[4816]: I0216 14:34:10.869051 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 14:34:11 crc kubenswrapper[4816]: I0216 14:34:11.355211 4816 generic.go:334] "Generic (PLEG): container finished" podID="7eddabf2-6f16-4466-a536-79daaa13fef8" containerID="b58d4c0e79eaf9cf535ba14e2aadc38a7679ed768c9b43c41bef2ae071180560" exitCode=0 Feb 16 14:34:11 crc kubenswrapper[4816]: I0216 14:34:11.355283 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" event={"ID":"7eddabf2-6f16-4466-a536-79daaa13fef8","Type":"ContainerDied","Data":"b58d4c0e79eaf9cf535ba14e2aadc38a7679ed768c9b43c41bef2ae071180560"} Feb 16 14:34:11 crc kubenswrapper[4816]: I0216 14:34:11.357161 4816 generic.go:334] "Generic (PLEG): container finished" podID="2158e136-e15e-4388-aaba-4a3a6d936dbe" containerID="76732a0ced934cf01b3e6505bf23a6da2a741abcc55920621e0db4aed7d34868" exitCode=0 Feb 16 14:34:11 crc kubenswrapper[4816]: I0216 14:34:11.357244 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tqhhr" event={"ID":"2158e136-e15e-4388-aaba-4a3a6d936dbe","Type":"ContainerDied","Data":"76732a0ced934cf01b3e6505bf23a6da2a741abcc55920621e0db4aed7d34868"} Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.796326 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tqhhr" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.800397 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.918229 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-scripts\") pod \"2158e136-e15e-4388-aaba-4a3a6d936dbe\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.918368 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-config-data\") pod \"2158e136-e15e-4388-aaba-4a3a6d936dbe\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.918417 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-config-data\") pod \"7eddabf2-6f16-4466-a536-79daaa13fef8\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.918449 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-combined-ca-bundle\") pod \"2158e136-e15e-4388-aaba-4a3a6d936dbe\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.918509 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-combined-ca-bundle\") pod \"7eddabf2-6f16-4466-a536-79daaa13fef8\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " Feb 16 14:34:12 crc 
kubenswrapper[4816]: I0216 14:34:12.918529 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75pzr\" (UniqueName: \"kubernetes.io/projected/2158e136-e15e-4388-aaba-4a3a6d936dbe-kube-api-access-75pzr\") pod \"2158e136-e15e-4388-aaba-4a3a6d936dbe\" (UID: \"2158e136-e15e-4388-aaba-4a3a6d936dbe\") " Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.918580 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzm95\" (UniqueName: \"kubernetes.io/projected/7eddabf2-6f16-4466-a536-79daaa13fef8-kube-api-access-xzm95\") pod \"7eddabf2-6f16-4466-a536-79daaa13fef8\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.918629 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-scripts\") pod \"7eddabf2-6f16-4466-a536-79daaa13fef8\" (UID: \"7eddabf2-6f16-4466-a536-79daaa13fef8\") " Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.924014 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eddabf2-6f16-4466-a536-79daaa13fef8-kube-api-access-xzm95" (OuterVolumeSpecName: "kube-api-access-xzm95") pod "7eddabf2-6f16-4466-a536-79daaa13fef8" (UID: "7eddabf2-6f16-4466-a536-79daaa13fef8"). InnerVolumeSpecName "kube-api-access-xzm95". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.924352 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2158e136-e15e-4388-aaba-4a3a6d936dbe-kube-api-access-75pzr" (OuterVolumeSpecName: "kube-api-access-75pzr") pod "2158e136-e15e-4388-aaba-4a3a6d936dbe" (UID: "2158e136-e15e-4388-aaba-4a3a6d936dbe"). InnerVolumeSpecName "kube-api-access-75pzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.924476 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-scripts" (OuterVolumeSpecName: "scripts") pod "2158e136-e15e-4388-aaba-4a3a6d936dbe" (UID: "2158e136-e15e-4388-aaba-4a3a6d936dbe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.936754 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-scripts" (OuterVolumeSpecName: "scripts") pod "7eddabf2-6f16-4466-a536-79daaa13fef8" (UID: "7eddabf2-6f16-4466-a536-79daaa13fef8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.942355 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-config-data" (OuterVolumeSpecName: "config-data") pod "7eddabf2-6f16-4466-a536-79daaa13fef8" (UID: "7eddabf2-6f16-4466-a536-79daaa13fef8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.943047 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2158e136-e15e-4388-aaba-4a3a6d936dbe" (UID: "2158e136-e15e-4388-aaba-4a3a6d936dbe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.948323 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7eddabf2-6f16-4466-a536-79daaa13fef8" (UID: "7eddabf2-6f16-4466-a536-79daaa13fef8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:12 crc kubenswrapper[4816]: I0216 14:34:12.949849 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-config-data" (OuterVolumeSpecName: "config-data") pod "2158e136-e15e-4388-aaba-4a3a6d936dbe" (UID: "2158e136-e15e-4388-aaba-4a3a6d936dbe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.020930 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzm95\" (UniqueName: \"kubernetes.io/projected/7eddabf2-6f16-4466-a536-79daaa13fef8-kube-api-access-xzm95\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.020965 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.020978 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.020988 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.021003 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.021044 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2158e136-e15e-4388-aaba-4a3a6d936dbe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.021055 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eddabf2-6f16-4466-a536-79daaa13fef8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.021065 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75pzr\" (UniqueName: \"kubernetes.io/projected/2158e136-e15e-4388-aaba-4a3a6d936dbe-kube-api-access-75pzr\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 
14:34:13.380056 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8cdtd" event={"ID":"7eddabf2-6f16-4466-a536-79daaa13fef8","Type":"ContainerDied","Data":"95a0bbe2390847b92322234accb8647f52ca143a6eabb4bae0e43f8816e1ea84"}
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.380282 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95a0bbe2390847b92322234accb8647f52ca143a6eabb4bae0e43f8816e1ea84"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.380314 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8cdtd"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.381973 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tqhhr" event={"ID":"2158e136-e15e-4388-aaba-4a3a6d936dbe","Type":"ContainerDied","Data":"747406bc6426280cd23d4164d217c1dab6fac3936b7f9f2c4a9c4060214bce9f"}
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.382061 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="747406bc6426280cd23d4164d217c1dab6fac3936b7f9f2c4a9c4060214bce9f"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.382105 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tqhhr"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.465546 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Feb 16 14:34:13 crc kubenswrapper[4816]: E0216 14:34:13.466198 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eddabf2-6f16-4466-a536-79daaa13fef8" containerName="nova-cell1-conductor-db-sync"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.466218 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eddabf2-6f16-4466-a536-79daaa13fef8" containerName="nova-cell1-conductor-db-sync"
Feb 16 14:34:13 crc kubenswrapper[4816]: E0216 14:34:13.466226 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2158e136-e15e-4388-aaba-4a3a6d936dbe" containerName="nova-manage"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.466232 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="2158e136-e15e-4388-aaba-4a3a6d936dbe" containerName="nova-manage"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.466406 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="2158e136-e15e-4388-aaba-4a3a6d936dbe" containerName="nova-manage"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.466437 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eddabf2-6f16-4466-a536-79daaa13fef8" containerName="nova-cell1-conductor-db-sync"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.466988 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.467065 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.470383 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.958911 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.959180 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-log" containerID="cri-o://eadce4ba8279050680e32c0d2b52e0fd186561d56dc5eaada4edbb0650f304ac" gracePeriod=30
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.959300 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-api" containerID="cri-o://3776b1d38677c925e6e61fe12d702f12fc6f62009507eee6faea2a47dcd5b4e6" gracePeriod=30
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.971858 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.972719 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.972862 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g5wb\" (UniqueName: \"kubernetes.io/projected/7b2ba465-5c2c-460b-8656-edc33f2015b1-kube-api-access-6g5wb\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.980105 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 16 14:34:13 crc kubenswrapper[4816]: I0216 14:34:13.980381 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" containerName="nova-scheduler-scheduler" containerID="cri-o://5a9a4a7a46b1b068cd0b7663ef5953e5b9cf57e616eaf2c86988e4a4cd0ca156" gracePeriod=30
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.043191 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.043434 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerName="nova-metadata-log" containerID="cri-o://0d1b48a04a041b04a08f758c2e80f2a35f10d5d4182e545e3f5bc71828d0b366" gracePeriod=30
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.043591 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerName="nova-metadata-metadata" containerID="cri-o://9d5127ebbf660448ed181aa24c36ef7956db25911330ed6981643b4794aff16f" gracePeriod=30
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.074478 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.074525 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.074566 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g5wb\" (UniqueName: \"kubernetes.io/projected/7b2ba465-5c2c-460b-8656-edc33f2015b1-kube-api-access-6g5wb\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.079763 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.084190 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.101625 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g5wb\" (UniqueName: \"kubernetes.io/projected/7b2ba465-5c2c-460b-8656-edc33f2015b1-kube-api-access-6g5wb\") pod \"nova-cell1-conductor-0\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.384940 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.406583 4816 generic.go:334] "Generic (PLEG): container finished" podID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerID="9d5127ebbf660448ed181aa24c36ef7956db25911330ed6981643b4794aff16f" exitCode=0
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.406620 4816 generic.go:334] "Generic (PLEG): container finished" podID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerID="0d1b48a04a041b04a08f758c2e80f2a35f10d5d4182e545e3f5bc71828d0b366" exitCode=143
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.406714 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c7c7b2b7-55bf-4359-8a5d-b71a0239e746","Type":"ContainerDied","Data":"9d5127ebbf660448ed181aa24c36ef7956db25911330ed6981643b4794aff16f"}
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.406752 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c7c7b2b7-55bf-4359-8a5d-b71a0239e746","Type":"ContainerDied","Data":"0d1b48a04a041b04a08f758c2e80f2a35f10d5d4182e545e3f5bc71828d0b366"}
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.434551 4816 generic.go:334] "Generic (PLEG): container finished" podID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerID="3776b1d38677c925e6e61fe12d702f12fc6f62009507eee6faea2a47dcd5b4e6" exitCode=0
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.434579 4816 generic.go:334] "Generic (PLEG): container finished" podID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerID="eadce4ba8279050680e32c0d2b52e0fd186561d56dc5eaada4edbb0650f304ac" exitCode=143
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.434603 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb","Type":"ContainerDied","Data":"3776b1d38677c925e6e61fe12d702f12fc6f62009507eee6faea2a47dcd5b4e6"}
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.434628 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb","Type":"ContainerDied","Data":"eadce4ba8279050680e32c0d2b52e0fd186561d56dc5eaada4edbb0650f304ac"}
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.533067 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.540255 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.583944 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-combined-ca-bundle\") pod \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.584000 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-combined-ca-bundle\") pod \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.584037 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-config-data\") pod \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.584073 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rndj2\" (UniqueName: \"kubernetes.io/projected/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-kube-api-access-rndj2\") pod \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.584128 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-logs\") pod \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.584160 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-logs\") pod \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.584249 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-config-data\") pod \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\" (UID: \"c7c7b2b7-55bf-4359-8a5d-b71a0239e746\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.584281 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbbpf\" (UniqueName: \"kubernetes.io/projected/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-kube-api-access-vbbpf\") pod \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\" (UID: \"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb\") "
Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.585037 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-logs" (OuterVolumeSpecName: "logs") pod "c7c7b2b7-55bf-4359-8a5d-b71a0239e746" (UID: "c7c7b2b7-55bf-4359-8a5d-b71a0239e746"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.585226 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-logs" (OuterVolumeSpecName: "logs") pod "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" (UID: "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.589210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-kube-api-access-vbbpf" (OuterVolumeSpecName: "kube-api-access-vbbpf") pod "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" (UID: "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb"). InnerVolumeSpecName "kube-api-access-vbbpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.595644 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-kube-api-access-rndj2" (OuterVolumeSpecName: "kube-api-access-rndj2") pod "c7c7b2b7-55bf-4359-8a5d-b71a0239e746" (UID: "c7c7b2b7-55bf-4359-8a5d-b71a0239e746"). InnerVolumeSpecName "kube-api-access-rndj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.611560 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-config-data" (OuterVolumeSpecName: "config-data") pod "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" (UID: "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.611669 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7c7b2b7-55bf-4359-8a5d-b71a0239e746" (UID: "c7c7b2b7-55bf-4359-8a5d-b71a0239e746"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.618924 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" (UID: "3cea6032-c8ee-4e5f-8d99-11ddf4af40eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.625329 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-config-data" (OuterVolumeSpecName: "config-data") pod "c7c7b2b7-55bf-4359-8a5d-b71a0239e746" (UID: "c7c7b2b7-55bf-4359-8a5d-b71a0239e746"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686209 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686525 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686540 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686550 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rndj2\" (UniqueName: \"kubernetes.io/projected/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-kube-api-access-rndj2\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686564 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686573 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686584 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7c7b2b7-55bf-4359-8a5d-b71a0239e746-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.686594 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbbpf\" (UniqueName: \"kubernetes.io/projected/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb-kube-api-access-vbbpf\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:14 crc kubenswrapper[4816]: I0216 14:34:14.850419 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.443153 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b2ba465-5c2c-460b-8656-edc33f2015b1","Type":"ContainerStarted","Data":"6177b192061a23f28c87c9be802fd4cc201dbedbba40a631fd4503266ce7bc45"} Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.443576 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b2ba465-5c2c-460b-8656-edc33f2015b1","Type":"ContainerStarted","Data":"df443f2afe33ed1b3bf2d7675e96289c990204a05c29e41ee1ff1d3229ca0216"} Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.443604 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.446450 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c7c7b2b7-55bf-4359-8a5d-b71a0239e746","Type":"ContainerDied","Data":"9f7416deca440c2c82704103b314bc10300ca813bd0d4f6ce8d5081157e321d0"} Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.446626 4816 scope.go:117] "RemoveContainer" 
containerID="9d5127ebbf660448ed181aa24c36ef7956db25911330ed6981643b4794aff16f" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.446898 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.449581 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3cea6032-c8ee-4e5f-8d99-11ddf4af40eb","Type":"ContainerDied","Data":"61a34f65a8461de77189002d281c2c15c8cac2ca68d8c1ae98ace26c058f642c"} Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.450042 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.464887 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.464862472 podStartE2EDuration="2.464862472s" podCreationTimestamp="2026-02-16 14:34:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:15.460174445 +0000 UTC m=+5454.786888193" watchObservedRunningTime="2026-02-16 14:34:15.464862472 +0000 UTC m=+5454.791576200" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.492760 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.494988 4816 scope.go:117] "RemoveContainer" containerID="0d1b48a04a041b04a08f758c2e80f2a35f10d5d4182e545e3f5bc71828d0b366" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.525252 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.545249 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.550919 4816 scope.go:117] "RemoveContainer" containerID="3776b1d38677c925e6e61fe12d702f12fc6f62009507eee6faea2a47dcd5b4e6" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.557289 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.567681 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: E0216 14:34:15.568035 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-log" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568051 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-log" Feb 16 14:34:15 crc kubenswrapper[4816]: E0216 14:34:15.568072 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerName="nova-metadata-log" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568079 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerName="nova-metadata-log" Feb 16 14:34:15 crc kubenswrapper[4816]: E0216 14:34:15.568104 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerName="nova-metadata-metadata" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568110 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" 
containerName="nova-metadata-metadata" Feb 16 14:34:15 crc kubenswrapper[4816]: E0216 14:34:15.568119 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-api" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568124 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-api" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568274 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-log" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568298 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" containerName="nova-api-api" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568306 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerName="nova-metadata-log" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.568320 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" containerName="nova-metadata-metadata" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.569237 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.571600 4816 scope.go:117] "RemoveContainer" containerID="eadce4ba8279050680e32c0d2b52e0fd186561d56dc5eaada4edbb0650f304ac" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.572080 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.577364 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.581557 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.585645 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.587337 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.587783 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.599862 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.604055 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.604112 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-config-data\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.604573 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sclg\" (UniqueName: \"kubernetes.io/projected/55ad83c0-0f80-4edc-8605-e00381f07052-kube-api-access-4sclg\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.604799 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ad83c0-0f80-4edc-8605-e00381f07052-logs\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.604862 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9563c925-bde7-43c3-b7a4-7067b2b734ce-logs\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.604938 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl9wj\" (UniqueName: \"kubernetes.io/projected/9563c925-bde7-43c3-b7a4-7067b2b734ce-kube-api-access-fl9wj\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.604983 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-config-data\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.605173 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.606027 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.707075 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-config-data\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.707194 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.707268 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.707291 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-config-data\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.707420 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sclg\" (UniqueName: \"kubernetes.io/projected/55ad83c0-0f80-4edc-8605-e00381f07052-kube-api-access-4sclg\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.708059 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ad83c0-0f80-4edc-8605-e00381f07052-logs\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.709548 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ad83c0-0f80-4edc-8605-e00381f07052-logs\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.709602 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9563c925-bde7-43c3-b7a4-7067b2b734ce-logs\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.709688 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl9wj\" (UniqueName: \"kubernetes.io/projected/9563c925-bde7-43c3-b7a4-7067b2b734ce-kube-api-access-fl9wj\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 
14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.710928 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9563c925-bde7-43c3-b7a4-7067b2b734ce-logs\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.712959 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.713065 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-config-data\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.713610 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.720086 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-config-data\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.725998 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sclg\" (UniqueName: \"kubernetes.io/projected/55ad83c0-0f80-4edc-8605-e00381f07052-kube-api-access-4sclg\") pod \"nova-metadata-0\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.728130 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl9wj\" (UniqueName: \"kubernetes.io/projected/9563c925-bde7-43c3-b7a4-7067b2b734ce-kube-api-access-fl9wj\") pod \"nova-api-0\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.879834 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.891068 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.912998 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.953334 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ddc577c4f-cch6q"] Feb 16 14:34:15 crc kubenswrapper[4816]: I0216 14:34:15.953576 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" podUID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerName="dnsmasq-dns" containerID="cri-o://a0e6b281aba02a20b5b90a05868baaebb124fd63a12b028ab95f350282699cbe" gracePeriod=10 Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.473843 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.476644 4816 generic.go:334] "Generic (PLEG): container finished" podID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerID="a0e6b281aba02a20b5b90a05868baaebb124fd63a12b028ab95f350282699cbe" exitCode=0 Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.476729 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" event={"ID":"c5eb2031-9005-436a-827c-4c3df3b13d75","Type":"ContainerDied","Data":"a0e6b281aba02a20b5b90a05868baaebb124fd63a12b028ab95f350282699cbe"} Feb 16 14:34:16 crc kubenswrapper[4816]: W0216 14:34:16.481799 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55ad83c0_0f80_4edc_8605_e00381f07052.slice/crio-0edf095de7cf448aeb03618b42e374a7146ce5f82f02af57d5de008ebe6339c9 WatchSource:0}: Error finding container 0edf095de7cf448aeb03618b42e374a7146ce5f82f02af57d5de008ebe6339c9: Status 404 returned error can't find the container with id 0edf095de7cf448aeb03618b42e374a7146ce5f82f02af57d5de008ebe6339c9 Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.523430 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.589933 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:16 crc kubenswrapper[4816]: W0216 14:34:16.612612 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9563c925_bde7_43c3_b7a4_7067b2b734ce.slice/crio-f911e1e6a239a5d9743d4f26a3389f5b04e82bf54a52212accf06791410414be WatchSource:0}: Error finding container f911e1e6a239a5d9743d4f26a3389f5b04e82bf54a52212accf06791410414be: Status 404 returned error can't find the container with id f911e1e6a239a5d9743d4f26a3389f5b04e82bf54a52212accf06791410414be Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.682029 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.847631 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94h28\" (UniqueName: \"kubernetes.io/projected/c5eb2031-9005-436a-827c-4c3df3b13d75-kube-api-access-94h28\") pod \"c5eb2031-9005-436a-827c-4c3df3b13d75\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.848485 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-sb\") pod \"c5eb2031-9005-436a-827c-4c3df3b13d75\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.848610 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-config\") pod \"c5eb2031-9005-436a-827c-4c3df3b13d75\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.848808 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-nb\") pod \"c5eb2031-9005-436a-827c-4c3df3b13d75\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.848881 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-dns-svc\") pod \"c5eb2031-9005-436a-827c-4c3df3b13d75\" (UID: \"c5eb2031-9005-436a-827c-4c3df3b13d75\") " Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.854800 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5eb2031-9005-436a-827c-4c3df3b13d75-kube-api-access-94h28" (OuterVolumeSpecName: "kube-api-access-94h28") pod "c5eb2031-9005-436a-827c-4c3df3b13d75" (UID: "c5eb2031-9005-436a-827c-4c3df3b13d75"). InnerVolumeSpecName "kube-api-access-94h28". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.909939 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c5eb2031-9005-436a-827c-4c3df3b13d75" (UID: "c5eb2031-9005-436a-827c-4c3df3b13d75"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.910747 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c5eb2031-9005-436a-827c-4c3df3b13d75" (UID: "c5eb2031-9005-436a-827c-4c3df3b13d75"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.914227 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-config" (OuterVolumeSpecName: "config") pod "c5eb2031-9005-436a-827c-4c3df3b13d75" (UID: "c5eb2031-9005-436a-827c-4c3df3b13d75"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.917003 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c5eb2031-9005-436a-827c-4c3df3b13d75" (UID: "c5eb2031-9005-436a-827c-4c3df3b13d75"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.950828 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.950863 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.950877 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.950891 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94h28\" (UniqueName: \"kubernetes.io/projected/c5eb2031-9005-436a-827c-4c3df3b13d75-kube-api-access-94h28\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:16 crc kubenswrapper[4816]: I0216 14:34:16.950904 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5eb2031-9005-436a-827c-4c3df3b13d75-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.410151 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cea6032-c8ee-4e5f-8d99-11ddf4af40eb" path="/var/lib/kubelet/pods/3cea6032-c8ee-4e5f-8d99-11ddf4af40eb/volumes" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.411067 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7c7b2b7-55bf-4359-8a5d-b71a0239e746" path="/var/lib/kubelet/pods/c7c7b2b7-55bf-4359-8a5d-b71a0239e746/volumes" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.513410 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9563c925-bde7-43c3-b7a4-7067b2b734ce","Type":"ContainerStarted","Data":"d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d"} Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.513473 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9563c925-bde7-43c3-b7a4-7067b2b734ce","Type":"ContainerStarted","Data":"4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241"} Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.513485 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9563c925-bde7-43c3-b7a4-7067b2b734ce","Type":"ContainerStarted","Data":"f911e1e6a239a5d9743d4f26a3389f5b04e82bf54a52212accf06791410414be"} Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.515220 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" event={"ID":"c5eb2031-9005-436a-827c-4c3df3b13d75","Type":"ContainerDied","Data":"97bcd34cf7a622dbe2dc5581136a4a1fe876f92b7599f95d7de7890d6bfddc1a"} Feb 16 14:34:17 crc 
kubenswrapper[4816]: I0216 14:34:17.515252 4816 scope.go:117] "RemoveContainer" containerID="a0e6b281aba02a20b5b90a05868baaebb124fd63a12b028ab95f350282699cbe" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.515338 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddc577c4f-cch6q" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.520649 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55ad83c0-0f80-4edc-8605-e00381f07052","Type":"ContainerStarted","Data":"dc81d85d46dd9e7d3069a295e2894ca0b7e7a61e9fb79bde5542b707cb635cf2"} Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.520775 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55ad83c0-0f80-4edc-8605-e00381f07052","Type":"ContainerStarted","Data":"d558e015f955b773d0287fe2954b11fb127a4bf8d6b29f1b138522652419197b"} Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.520786 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55ad83c0-0f80-4edc-8605-e00381f07052","Type":"ContainerStarted","Data":"0edf095de7cf448aeb03618b42e374a7146ce5f82f02af57d5de008ebe6339c9"} Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.538627 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.538608006 podStartE2EDuration="2.538608006s" podCreationTimestamp="2026-02-16 14:34:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:17.53581899 +0000 UTC m=+5456.862532718" watchObservedRunningTime="2026-02-16 14:34:17.538608006 +0000 UTC m=+5456.865321734" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.544208 4816 scope.go:117] "RemoveContainer" containerID="d20e93b4a5955702d768494279d6f07469857f5327bfad0a016b26c0fef3a8c1" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.570143 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.5701245459999997 podStartE2EDuration="2.570124546s" podCreationTimestamp="2026-02-16 14:34:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:17.562127807 +0000 UTC m=+5456.888841535" watchObservedRunningTime="2026-02-16 14:34:17.570124546 +0000 UTC m=+5456.896838274" Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.585110 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ddc577c4f-cch6q"] Feb 16 14:34:17 crc kubenswrapper[4816]: I0216 14:34:17.596701 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ddc577c4f-cch6q"] Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.535520 4816 generic.go:334] "Generic (PLEG): container finished" podID="2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" containerID="5a9a4a7a46b1b068cd0b7663ef5953e5b9cf57e616eaf2c86988e4a4cd0ca156" exitCode=0 Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.535630 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9","Type":"ContainerDied","Data":"5a9a4a7a46b1b068cd0b7663ef5953e5b9cf57e616eaf2c86988e4a4cd0ca156"} Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.535956 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-scheduler-0" event={"ID":"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9","Type":"ContainerDied","Data":"4227738665f6ec83ffa495f4f9741004efa1ee755e5b52bea0305d78d458093c"} Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.535979 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4227738665f6ec83ffa495f4f9741004efa1ee755e5b52bea0305d78d458093c" Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.546562 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.682351 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-combined-ca-bundle\") pod \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.682444 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-config-data\") pod \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.682476 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24lcj\" (UniqueName: \"kubernetes.io/projected/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-kube-api-access-24lcj\") pod \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\" (UID: \"2941642d-5bb2-4ea5-b3b8-a0dc0db695c9\") " Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.704947 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-kube-api-access-24lcj" (OuterVolumeSpecName: "kube-api-access-24lcj") pod "2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" (UID: "2941642d-5bb2-4ea5-b3b8-a0dc0db695c9"). InnerVolumeSpecName "kube-api-access-24lcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.710209 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" (UID: "2941642d-5bb2-4ea5-b3b8-a0dc0db695c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.732841 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-config-data" (OuterVolumeSpecName: "config-data") pod "2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" (UID: "2941642d-5bb2-4ea5-b3b8-a0dc0db695c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.785022 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.785057 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24lcj\" (UniqueName: \"kubernetes.io/projected/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-kube-api-access-24lcj\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:18 crc kubenswrapper[4816]: I0216 14:34:18.785068 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.414555 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5eb2031-9005-436a-827c-4c3df3b13d75" path="/var/lib/kubelet/pods/c5eb2031-9005-436a-827c-4c3df3b13d75/volumes" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.415853 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.545242 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.571495 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.585495 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.595981 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:19 crc kubenswrapper[4816]: E0216 14:34:19.596405 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerName="init" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.596450 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerName="init" Feb 16 14:34:19 crc kubenswrapper[4816]: E0216 14:34:19.596464 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" containerName="nova-scheduler-scheduler" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.596472 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" containerName="nova-scheduler-scheduler" Feb 16 14:34:19 crc kubenswrapper[4816]: E0216 14:34:19.596495 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerName="dnsmasq-dns" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.596503 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerName="dnsmasq-dns" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.596705 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" containerName="nova-scheduler-scheduler" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.596721 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5eb2031-9005-436a-827c-4c3df3b13d75" containerName="dnsmasq-dns" Feb 16 14:34:19 crc kubenswrapper[4816]: 
I0216 14:34:19.597400 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.599727 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.604767 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.700950 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-config-data\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.701016 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g7s8\" (UniqueName: \"kubernetes.io/projected/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-kube-api-access-4g7s8\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.701822 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.803335 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g7s8\" (UniqueName: \"kubernetes.io/projected/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-kube-api-access-4g7s8\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.803450 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.803521 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-config-data\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.807615 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-config-data\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.810105 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.821377 4816 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4g7s8\" (UniqueName: \"kubernetes.io/projected/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-kube-api-access-4g7s8\") pod \"nova-scheduler-0\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.861071 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rkv5w"] Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.864114 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.875185 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.875662 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.879485 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkv5w"] Feb 16 14:34:19 crc kubenswrapper[4816]: I0216 14:34:19.919744 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.009874 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg5jz\" (UniqueName: \"kubernetes.io/projected/d02a9f59-6c3c-471b-bee9-49b243451335-kube-api-access-tg5jz\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.009945 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-config-data\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.010009 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-scripts\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.010046 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.111118 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg5jz\" (UniqueName: \"kubernetes.io/projected/d02a9f59-6c3c-471b-bee9-49b243451335-kube-api-access-tg5jz\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.111471 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-config-data\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.111525 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-scripts\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.111552 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.117548 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.117593 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-config-data\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.126403 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-scripts\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.130390 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg5jz\" (UniqueName: \"kubernetes.io/projected/d02a9f59-6c3c-471b-bee9-49b243451335-kube-api-access-tg5jz\") pod \"nova-cell1-cell-mapping-rkv5w\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.215386 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.358876 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.556587 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8","Type":"ContainerStarted","Data":"e221716f1b4ab94889aafb0a74ccca4cd83e359bc8b1ff3f45f8495442896cb1"} Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.556890 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8","Type":"ContainerStarted","Data":"f78c998a238af5ddf46dfe7ea252b8c6bf98432a16d0f601dba78ea5b916cc4a"} Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.571430 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.5714112180000002 podStartE2EDuration="1.571411218s" podCreationTimestamp="2026-02-16 14:34:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:20.571137511 +0000 UTC m=+5459.897851239" watchObservedRunningTime="2026-02-16 14:34:20.571411218 +0000 UTC m=+5459.898124946" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.663513 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkv5w"] Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.913698 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 14:34:20 crc kubenswrapper[4816]: I0216 14:34:20.913778 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 14:34:21 crc kubenswrapper[4816]: I0216 14:34:21.426215 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:34:21 crc kubenswrapper[4816]: E0216 14:34:21.426791 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:34:21 crc kubenswrapper[4816]: I0216 14:34:21.426882 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2941642d-5bb2-4ea5-b3b8-a0dc0db695c9" path="/var/lib/kubelet/pods/2941642d-5bb2-4ea5-b3b8-a0dc0db695c9/volumes" Feb 16 14:34:21 crc kubenswrapper[4816]: I0216 14:34:21.569899 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkv5w" event={"ID":"d02a9f59-6c3c-471b-bee9-49b243451335","Type":"ContainerStarted","Data":"0341e288a53c31a042bfa1475252a62e458bcccf05b4f66e0578c27267c33fa2"} Feb 16 14:34:21 crc kubenswrapper[4816]: I0216 14:34:21.569967 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkv5w" event={"ID":"d02a9f59-6c3c-471b-bee9-49b243451335","Type":"ContainerStarted","Data":"45d89c288142782b9ba162307145026518533b7a0b3a6b0466e393ac1da1f30a"} Feb 16 14:34:21 crc kubenswrapper[4816]: I0216 14:34:21.588720 4816 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/nova-cell1-cell-mapping-rkv5w" podStartSLOduration=2.588702475 podStartE2EDuration="2.588702475s" podCreationTimestamp="2026-02-16 14:34:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:21.58481905 +0000 UTC m=+5460.911532798" watchObservedRunningTime="2026-02-16 14:34:21.588702475 +0000 UTC m=+5460.915416203" Feb 16 14:34:24 crc kubenswrapper[4816]: I0216 14:34:24.920965 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 16 14:34:25 crc kubenswrapper[4816]: I0216 14:34:25.605268 4816 generic.go:334] "Generic (PLEG): container finished" podID="d02a9f59-6c3c-471b-bee9-49b243451335" containerID="0341e288a53c31a042bfa1475252a62e458bcccf05b4f66e0578c27267c33fa2" exitCode=0 Feb 16 14:34:25 crc kubenswrapper[4816]: I0216 14:34:25.605320 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkv5w" event={"ID":"d02a9f59-6c3c-471b-bee9-49b243451335","Type":"ContainerDied","Data":"0341e288a53c31a042bfa1475252a62e458bcccf05b4f66e0578c27267c33fa2"} Feb 16 14:34:25 crc kubenswrapper[4816]: I0216 14:34:25.892348 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 14:34:25 crc kubenswrapper[4816]: I0216 14:34:25.892394 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 14:34:25 crc kubenswrapper[4816]: I0216 14:34:25.913529 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 16 14:34:25 crc kubenswrapper[4816]: I0216 14:34:25.913593 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.009018 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.016905 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.058007 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.65:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.058136 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.65:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.059173 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.145255 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-config-data\") pod \"d02a9f59-6c3c-471b-bee9-49b243451335\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.145295 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-combined-ca-bundle\") pod \"d02a9f59-6c3c-471b-bee9-49b243451335\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.145380 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg5jz\" (UniqueName: \"kubernetes.io/projected/d02a9f59-6c3c-471b-bee9-49b243451335-kube-api-access-tg5jz\") pod \"d02a9f59-6c3c-471b-bee9-49b243451335\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.145415 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-scripts\") pod \"d02a9f59-6c3c-471b-bee9-49b243451335\" (UID: \"d02a9f59-6c3c-471b-bee9-49b243451335\") " Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.165180 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d02a9f59-6c3c-471b-bee9-49b243451335-kube-api-access-tg5jz" (OuterVolumeSpecName: "kube-api-access-tg5jz") pod "d02a9f59-6c3c-471b-bee9-49b243451335" (UID: "d02a9f59-6c3c-471b-bee9-49b243451335"). InnerVolumeSpecName "kube-api-access-tg5jz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.165376 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-scripts" (OuterVolumeSpecName: "scripts") pod "d02a9f59-6c3c-471b-bee9-49b243451335" (UID: "d02a9f59-6c3c-471b-bee9-49b243451335"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.175904 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-config-data" (OuterVolumeSpecName: "config-data") pod "d02a9f59-6c3c-471b-bee9-49b243451335" (UID: "d02a9f59-6c3c-471b-bee9-49b243451335"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.181792 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d02a9f59-6c3c-471b-bee9-49b243451335" (UID: "d02a9f59-6c3c-471b-bee9-49b243451335"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.247338 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tg5jz\" (UniqueName: \"kubernetes.io/projected/d02a9f59-6c3c-471b-bee9-49b243451335-kube-api-access-tg5jz\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.247365 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.247375 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.247383 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d02a9f59-6c3c-471b-bee9-49b243451335-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.621940 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkv5w" event={"ID":"d02a9f59-6c3c-471b-bee9-49b243451335","Type":"ContainerDied","Data":"45d89c288142782b9ba162307145026518533b7a0b3a6b0466e393ac1da1f30a"} Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.621984 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45d89c288142782b9ba162307145026518533b7a0b3a6b0466e393ac1da1f30a" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.622016 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkv5w" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.775783 4816 scope.go:117] "RemoveContainer" containerID="649ec60e3054851ed900d5beb7e19cacf9b9200f6b5571dcf084f0b5f6bfae88" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.804160 4816 scope.go:117] "RemoveContainer" containerID="53c283b729b18f93462be9d74d6b4192007f3bd899ed5641ed30b51676c830ae" Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.833819 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.834180 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-log" containerID="cri-o://d558e015f955b773d0287fe2954b11fb127a4bf8d6b29f1b138522652419197b" gracePeriod=30 Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.834385 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-metadata" containerID="cri-o://dc81d85d46dd9e7d3069a295e2894ca0b7e7a61e9fb79bde5542b707cb635cf2" gracePeriod=30 Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.843717 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.843997 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" containerName="nova-scheduler-scheduler" containerID="cri-o://e221716f1b4ab94889aafb0a74ccca4cd83e359bc8b1ff3f45f8495442896cb1" gracePeriod=30 Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.855246 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.855506 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-log" containerID="cri-o://4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241" gracePeriod=30 Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.855823 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-api" containerID="cri-o://d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d" gracePeriod=30 Feb 16 14:34:27 crc kubenswrapper[4816]: I0216 14:34:27.880211 4816 scope.go:117] "RemoveContainer" containerID="cf994ff080aa17b308e0aa59c8a7d65bfad202f529a1114a759d79ae58b2bf4e" Feb 16 14:34:28 crc kubenswrapper[4816]: I0216 14:34:28.632419 4816 generic.go:334] "Generic (PLEG): container finished" podID="55ad83c0-0f80-4edc-8605-e00381f07052" containerID="d558e015f955b773d0287fe2954b11fb127a4bf8d6b29f1b138522652419197b" exitCode=143 Feb 16 14:34:28 crc kubenswrapper[4816]: I0216 14:34:28.632493 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55ad83c0-0f80-4edc-8605-e00381f07052","Type":"ContainerDied","Data":"d558e015f955b773d0287fe2954b11fb127a4bf8d6b29f1b138522652419197b"} Feb 16 14:34:28 crc kubenswrapper[4816]: I0216 14:34:28.634710 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"9563c925-bde7-43c3-b7a4-7067b2b734ce","Type":"ContainerDied","Data":"4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241"} Feb 16 14:34:28 crc kubenswrapper[4816]: I0216 14:34:28.634727 4816 generic.go:334] "Generic (PLEG): container finished" podID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerID="4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241" exitCode=143 Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.664396 4816 generic.go:334] "Generic (PLEG): container finished" podID="1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" containerID="e221716f1b4ab94889aafb0a74ccca4cd83e359bc8b1ff3f45f8495442896cb1" exitCode=0 Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.664443 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8","Type":"ContainerDied","Data":"e221716f1b4ab94889aafb0a74ccca4cd83e359bc8b1ff3f45f8495442896cb1"} Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.668179 4816 generic.go:334] "Generic (PLEG): container finished" podID="55ad83c0-0f80-4edc-8605-e00381f07052" containerID="dc81d85d46dd9e7d3069a295e2894ca0b7e7a61e9fb79bde5542b707cb635cf2" exitCode=0 Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.668208 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55ad83c0-0f80-4edc-8605-e00381f07052","Type":"ContainerDied","Data":"dc81d85d46dd9e7d3069a295e2894ca0b7e7a61e9fb79bde5542b707cb635cf2"} Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.668225 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55ad83c0-0f80-4edc-8605-e00381f07052","Type":"ContainerDied","Data":"0edf095de7cf448aeb03618b42e374a7146ce5f82f02af57d5de008ebe6339c9"} Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.668237 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0edf095de7cf448aeb03618b42e374a7146ce5f82f02af57d5de008ebe6339c9" Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.764615 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.934954 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sclg\" (UniqueName: \"kubernetes.io/projected/55ad83c0-0f80-4edc-8605-e00381f07052-kube-api-access-4sclg\") pod \"55ad83c0-0f80-4edc-8605-e00381f07052\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.935067 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-combined-ca-bundle\") pod \"55ad83c0-0f80-4edc-8605-e00381f07052\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.935150 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-config-data\") pod \"55ad83c0-0f80-4edc-8605-e00381f07052\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.935229 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ad83c0-0f80-4edc-8605-e00381f07052-logs\") pod \"55ad83c0-0f80-4edc-8605-e00381f07052\" (UID: \"55ad83c0-0f80-4edc-8605-e00381f07052\") " Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.937203 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55ad83c0-0f80-4edc-8605-e00381f07052-logs" (OuterVolumeSpecName: "logs") pod "55ad83c0-0f80-4edc-8605-e00381f07052" (UID: "55ad83c0-0f80-4edc-8605-e00381f07052"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.961354 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ad83c0-0f80-4edc-8605-e00381f07052-kube-api-access-4sclg" (OuterVolumeSpecName: "kube-api-access-4sclg") pod "55ad83c0-0f80-4edc-8605-e00381f07052" (UID: "55ad83c0-0f80-4edc-8605-e00381f07052"). InnerVolumeSpecName "kube-api-access-4sclg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.966135 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-config-data" (OuterVolumeSpecName: "config-data") pod "55ad83c0-0f80-4edc-8605-e00381f07052" (UID: "55ad83c0-0f80-4edc-8605-e00381f07052"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.966981 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55ad83c0-0f80-4edc-8605-e00381f07052" (UID: "55ad83c0-0f80-4edc-8605-e00381f07052"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:31 crc kubenswrapper[4816]: I0216 14:34:31.994393 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.039145 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sclg\" (UniqueName: \"kubernetes.io/projected/55ad83c0-0f80-4edc-8605-e00381f07052-kube-api-access-4sclg\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.039195 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.039209 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55ad83c0-0f80-4edc-8605-e00381f07052-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.039224 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55ad83c0-0f80-4edc-8605-e00381f07052-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.140845 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-config-data\") pod \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.140966 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-combined-ca-bundle\") pod \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.141157 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g7s8\" (UniqueName: \"kubernetes.io/projected/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-kube-api-access-4g7s8\") pod \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\" (UID: \"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8\") " Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.143983 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-kube-api-access-4g7s8" (OuterVolumeSpecName: "kube-api-access-4g7s8") pod "1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" (UID: "1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8"). InnerVolumeSpecName "kube-api-access-4g7s8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.161116 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-config-data" (OuterVolumeSpecName: "config-data") pod "1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" (UID: "1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.163743 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" (UID: "1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.242738 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g7s8\" (UniqueName: \"kubernetes.io/projected/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-kube-api-access-4g7s8\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.242785 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.242796 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.399781 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.400398 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.618391 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.655231 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl9wj\" (UniqueName: \"kubernetes.io/projected/9563c925-bde7-43c3-b7a4-7067b2b734ce-kube-api-access-fl9wj\") pod \"9563c925-bde7-43c3-b7a4-7067b2b734ce\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.655386 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-combined-ca-bundle\") pod \"9563c925-bde7-43c3-b7a4-7067b2b734ce\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.655412 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-config-data\") pod \"9563c925-bde7-43c3-b7a4-7067b2b734ce\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.655464 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9563c925-bde7-43c3-b7a4-7067b2b734ce-logs\") pod \"9563c925-bde7-43c3-b7a4-7067b2b734ce\" (UID: \"9563c925-bde7-43c3-b7a4-7067b2b734ce\") " Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.656048 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9563c925-bde7-43c3-b7a4-7067b2b734ce-logs" (OuterVolumeSpecName: "logs") pod "9563c925-bde7-43c3-b7a4-7067b2b734ce" (UID: "9563c925-bde7-43c3-b7a4-7067b2b734ce"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.660301 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9563c925-bde7-43c3-b7a4-7067b2b734ce-kube-api-access-fl9wj" (OuterVolumeSpecName: "kube-api-access-fl9wj") pod "9563c925-bde7-43c3-b7a4-7067b2b734ce" (UID: "9563c925-bde7-43c3-b7a4-7067b2b734ce"). InnerVolumeSpecName "kube-api-access-fl9wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.678771 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.678765 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8","Type":"ContainerDied","Data":"f78c998a238af5ddf46dfe7ea252b8c6bf98432a16d0f601dba78ea5b916cc4a"} Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.678911 4816 scope.go:117] "RemoveContainer" containerID="e221716f1b4ab94889aafb0a74ccca4cd83e359bc8b1ff3f45f8495442896cb1" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.682975 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9563c925-bde7-43c3-b7a4-7067b2b734ce" (UID: "9563c925-bde7-43c3-b7a4-7067b2b734ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.683001 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9563c925-bde7-43c3-b7a4-7067b2b734ce","Type":"ContainerDied","Data":"d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d"} Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.682984 4816 generic.go:334] "Generic (PLEG): container finished" podID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerID="d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d" exitCode=0 Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.683075 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.683134 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.683159 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9563c925-bde7-43c3-b7a4-7067b2b734ce","Type":"ContainerDied","Data":"f911e1e6a239a5d9743d4f26a3389f5b04e82bf54a52212accf06791410414be"} Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.688901 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-config-data" (OuterVolumeSpecName: "config-data") pod "9563c925-bde7-43c3-b7a4-7067b2b734ce" (UID: "9563c925-bde7-43c3-b7a4-7067b2b734ce"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.749807 4816 scope.go:117] "RemoveContainer" containerID="d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.757066 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9563c925-bde7-43c3-b7a4-7067b2b734ce-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.757100 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl9wj\" (UniqueName: \"kubernetes.io/projected/9563c925-bde7-43c3-b7a4-7067b2b734ce-kube-api-access-fl9wj\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.757114 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.757126 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9563c925-bde7-43c3-b7a4-7067b2b734ce-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.775823 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.792708 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.803934 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.824922 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.824978 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.825259 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d02a9f59-6c3c-471b-bee9-49b243451335" containerName="nova-manage" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825275 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d02a9f59-6c3c-471b-bee9-49b243451335" containerName="nova-manage" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.825288 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-metadata" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825295 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-metadata" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.825317 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-api" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825324 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-api" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.825335 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" containerName="nova-scheduler-scheduler" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825340 4816 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" containerName="nova-scheduler-scheduler" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.825353 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-log" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825359 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-log" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.825368 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-log" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825374 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-log" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825549 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-log" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825563 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-api" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825580 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" containerName="nova-scheduler-scheduler" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825587 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" containerName="nova-metadata-metadata" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825598 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" containerName="nova-api-log" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.825604 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d02a9f59-6c3c-471b-bee9-49b243451335" containerName="nova-manage" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.826142 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.826214 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.833743 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.834936 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.837697 4816 scope.go:117] "RemoveContainer" containerID="4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.837887 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.837989 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.858566 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-config-data\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.858603 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzv96\" (UniqueName: \"kubernetes.io/projected/8278cce4-3101-4681-b59b-8a597a462f78-kube-api-access-zzv96\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.858628 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.858645 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8qm9\" (UniqueName: \"kubernetes.io/projected/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-kube-api-access-k8qm9\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.858681 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-config-data\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.858727 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-logs\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.858780 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.862534 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.863728 4816 scope.go:117] "RemoveContainer" 
containerID="d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.864506 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d\": container with ID starting with d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d not found: ID does not exist" containerID="d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.864549 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d"} err="failed to get container status \"d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d\": rpc error: code = NotFound desc = could not find container \"d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d\": container with ID starting with d15cdb4dbe455527ba4ef2f9bf01c037183e0c2a3f99af6cbaf7ad90d6f5536d not found: ID does not exist" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.864576 4816 scope.go:117] "RemoveContainer" containerID="4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.867404 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241\": container with ID starting with 4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241 not found: ID does not exist" containerID="4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.867449 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241"} err="failed to get container status \"4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241\": rpc error: code = NotFound desc = could not find container \"4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241\": container with ID starting with 4d54767a1ec40d9f024d2fbea792ce336cd62b7a255d20260393c5f2cb3bf241 not found: ID does not exist" Feb 16 14:34:32 crc kubenswrapper[4816]: E0216 14:34:32.944763 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d1c72f9_85fa_4d12_9458_9b63c1bd5fc8.slice/crio-f78c998a238af5ddf46dfe7ea252b8c6bf98432a16d0f601dba78ea5b916cc4a\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55ad83c0_0f80_4edc_8605_e00381f07052.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d1c72f9_85fa_4d12_9458_9b63c1bd5fc8.slice\": RecentStats: unable to find data in memory cache]" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.960253 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.960335 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-config-data\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.960365 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzv96\" (UniqueName: \"kubernetes.io/projected/8278cce4-3101-4681-b59b-8a597a462f78-kube-api-access-zzv96\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.960432 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.960458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8qm9\" (UniqueName: \"kubernetes.io/projected/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-kube-api-access-k8qm9\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.960502 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-config-data\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.960567 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-logs\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.961068 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-logs\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.966002 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.966053 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-config-data\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.966361 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-config-data\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0" Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 
Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.966583 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0"
Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.977530 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzv96\" (UniqueName: \"kubernetes.io/projected/8278cce4-3101-4681-b59b-8a597a462f78-kube-api-access-zzv96\") pod \"nova-scheduler-0\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " pod="openstack/nova-scheduler-0"
Feb 16 14:34:32 crc kubenswrapper[4816]: I0216 14:34:32.979166 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8qm9\" (UniqueName: \"kubernetes.io/projected/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-kube-api-access-k8qm9\") pod \"nova-metadata-0\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " pod="openstack/nova-metadata-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.018247 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.026400 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.045949 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.047975 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.050553 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.064932 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.164090 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-logs\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.164475 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.164680 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxs8t\" (UniqueName: \"kubernetes.io/projected/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-kube-api-access-zxs8t\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.165163 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-config-data\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.166242 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.180375 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.266317 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-logs\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.266379 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.266413 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxs8t\" (UniqueName: \"kubernetes.io/projected/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-kube-api-access-zxs8t\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.266432 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-config-data\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.266874 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-logs\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.271691 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-config-data\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.272127 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.314504 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxs8t\" (UniqueName: \"kubernetes.io/projected/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-kube-api-access-zxs8t\") pod \"nova-api-0\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.370377 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.430930 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8" path="/var/lib/kubelet/pods/1d1c72f9-85fa-4d12-9458-9b63c1bd5fc8/volumes"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.431837 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55ad83c0-0f80-4edc-8605-e00381f07052" path="/var/lib/kubelet/pods/55ad83c0-0f80-4edc-8605-e00381f07052/volumes"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.435129 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9563c925-bde7-43c3-b7a4-7067b2b734ce" path="/var/lib/kubelet/pods/9563c925-bde7-43c3-b7a4-7067b2b734ce/volumes"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.435792 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.694790 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.704331 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8278cce4-3101-4681-b59b-8a597a462f78","Type":"ContainerStarted","Data":"f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37"}
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.704391 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8278cce4-3101-4681-b59b-8a597a462f78","Type":"ContainerStarted","Data":"700f4bf0171aebffad57df89360d7f8087e1db8bf53ef590658ecfe65f09f335"}
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.899736 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.8997097680000001 podStartE2EDuration="1.899709768s" podCreationTimestamp="2026-02-16 14:34:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:33.720371467 +0000 UTC m=+5473.047085215" watchObservedRunningTime="2026-02-16 14:34:33.899709768 +0000 UTC m=+5473.226423516"
Feb 16 14:34:33 crc kubenswrapper[4816]: I0216 14:34:33.904143 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.718728 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c1b4217-ef70-4aa2-880b-b92e1d536ad8","Type":"ContainerStarted","Data":"d1e5505c71cc4204e408a903b5c207ad34a0a8329930dd2ef11a033b9cf5dbb2"}
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.719050 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c1b4217-ef70-4aa2-880b-b92e1d536ad8","Type":"ContainerStarted","Data":"4d5b1ee8e39858dce810c13dde473b4519f86758c7c2e522083e7811a6a5d893"}
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.719064 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c1b4217-ef70-4aa2-880b-b92e1d536ad8","Type":"ContainerStarted","Data":"dd31fb64e1a4f602efd2db35f9adf06945048651dbbf6cbd53d96107ebd82a8b"}
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.721323 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e32b2ea-15ec-43fd-bfe6-259c75809b4a","Type":"ContainerStarted","Data":"f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9"}
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.721379 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e32b2ea-15ec-43fd-bfe6-259c75809b4a","Type":"ContainerStarted","Data":"83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c"}
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.721392 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e32b2ea-15ec-43fd-bfe6-259c75809b4a","Type":"ContainerStarted","Data":"ff111193962edced918e488cc2742ba209e65ee40305987364099d18eb754f92"}
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.752411 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.752383205 podStartE2EDuration="1.752383205s" podCreationTimestamp="2026-02-16 14:34:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:34.74707274 +0000 UTC m=+5474.073786478" watchObservedRunningTime="2026-02-16 14:34:34.752383205 +0000 UTC m=+5474.079096943"
Feb 16 14:34:34 crc kubenswrapper[4816]: I0216 14:34:34.777798 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.777769158 podStartE2EDuration="2.777769158s" podCreationTimestamp="2026-02-16 14:34:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:34.773402169 +0000 UTC m=+5474.100115897" watchObservedRunningTime="2026-02-16 14:34:34.777769158 +0000 UTC m=+5474.104482886"
Feb 16 14:34:38 crc kubenswrapper[4816]: I0216 14:34:38.166803 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Feb 16 14:34:38 crc kubenswrapper[4816]: I0216 14:34:38.181410 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 16 14:34:38 crc kubenswrapper[4816]: I0216 14:34:38.181463 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.167285 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.185813 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.185857 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.196412 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.371000 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.371363 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.399326 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"
Feb 16 14:34:43 crc kubenswrapper[4816]: E0216 14:34:43.399607 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:34:43 crc kubenswrapper[4816]: I0216 14:34:43.832189 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Feb 16 14:34:44 crc kubenswrapper[4816]: I0216 14:34:44.267902 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.70:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 14:34:44 crc kubenswrapper[4816]: I0216 14:34:44.267923 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.70:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 14:34:44 crc kubenswrapper[4816]: I0216 14:34:44.453841 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.71:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 14:34:44 crc kubenswrapper[4816]: I0216 14:34:44.453846 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.71:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.186182 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.188261 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.192684 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.197397 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.375369 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.375774 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.376525 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.376544 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.381049 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.381410 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.587154 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d64bcf49c-7phcm"]
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.588668 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.603083 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d64bcf49c-7phcm"]
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.697817 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b5zk\" (UniqueName: \"kubernetes.io/projected/00fcddf9-4f69-4c30-982b-26a3af292e61-kube-api-access-6b5zk\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.697926 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-nb\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.697980 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-dns-svc\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.698031 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-config\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.698052 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-sb\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.799376 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-config\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.799423 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-sb\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.799509 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b5zk\" (UniqueName: \"kubernetes.io/projected/00fcddf9-4f69-4c30-982b-26a3af292e61-kube-api-access-6b5zk\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.799589 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-nb\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.799628 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-dns-svc\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.800754 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-config\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.800888 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-dns-svc\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.800912 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-nb\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.801016 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-sb\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.818601 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b5zk\" (UniqueName: \"kubernetes.io/projected/00fcddf9-4f69-4c30-982b-26a3af292e61-kube-api-access-6b5zk\") pod \"dnsmasq-dns-5d64bcf49c-7phcm\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:53 crc kubenswrapper[4816]: I0216 14:34:53.928216 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:54 crc kubenswrapper[4816]: I0216 14:34:54.455628 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d64bcf49c-7phcm"]
Feb 16 14:34:54 crc kubenswrapper[4816]: I0216 14:34:54.907537 4816 generic.go:334] "Generic (PLEG): container finished" podID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerID="99412c21132d0fd621e70bfa2c992002ca26dfda2a279e982329f69d1eb84c09" exitCode=0
Feb 16 14:34:54 crc kubenswrapper[4816]: I0216 14:34:54.907715 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" event={"ID":"00fcddf9-4f69-4c30-982b-26a3af292e61","Type":"ContainerDied","Data":"99412c21132d0fd621e70bfa2c992002ca26dfda2a279e982329f69d1eb84c09"}
Feb 16 14:34:54 crc kubenswrapper[4816]: I0216 14:34:54.907868 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" event={"ID":"00fcddf9-4f69-4c30-982b-26a3af292e61","Type":"ContainerStarted","Data":"496da3012b9a650474698887cc9e8cda422844c28074b432c7bb94ac29d8fae5"}
Feb 16 14:34:55 crc kubenswrapper[4816]: I0216 14:34:55.398550 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"
Feb 16 14:34:55 crc kubenswrapper[4816]: E0216 14:34:55.399034 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:34:55 crc kubenswrapper[4816]: I0216 14:34:55.917875 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" event={"ID":"00fcddf9-4f69-4c30-982b-26a3af292e61","Type":"ContainerStarted","Data":"a7177308cc69e28111e9601594a778f6b058bb396e0dd835054fe8149fb87c43"}
Feb 16 14:34:55 crc kubenswrapper[4816]: I0216 14:34:55.918970 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:34:55 crc kubenswrapper[4816]: I0216 14:34:55.940365 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" podStartSLOduration=2.940348124 podStartE2EDuration="2.940348124s" podCreationTimestamp="2026-02-16 14:34:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:34:55.93286613 +0000 UTC m=+5495.259579858" watchObservedRunningTime="2026-02-16 14:34:55.940348124 +0000 UTC m=+5495.267061842"
Feb 16 14:35:03 crc kubenswrapper[4816]: I0216 14:35:03.929852 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm"
Feb 16 14:35:03 crc kubenswrapper[4816]: I0216 14:35:03.995331 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54fbc4c87-qldfx"]
Feb 16 14:35:03 crc kubenswrapper[4816]: I0216 14:35:03.995600 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" podUID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerName="dnsmasq-dns" containerID="cri-o://5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c" gracePeriod=10
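Note: "Killing container with a grace period" with gracePeriod=10 means the runtime gets ten seconds between SIGTERM and SIGKILL; this is also the moment the new dnsmasq replica (5d64bcf49c-7phcm) has become ready, so the old one (54fbc4c87-qldfx) can be retired. The kubelet delegates the actual kill to CRI-O; schematically the pattern looks like this (Unix-only, a stand-in sleep process, not CRI-O's implementation):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to the grace period for the process
// to exit, then escalates to SIGKILL if it is still running.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	_ = cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL once the grace period lapses
		<-done
		fmt.Println("killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 10*time.Second) // gracePeriod=10, as in the log entry
}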
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.538979 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx"
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.610519 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-dns-svc\") pod \"ba96c5a1-a23d-4616-b648-2644b8de5a32\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") "
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.610684 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-config\") pod \"ba96c5a1-a23d-4616-b648-2644b8de5a32\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") "
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.610745 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-sb\") pod \"ba96c5a1-a23d-4616-b648-2644b8de5a32\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") "
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.610834 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcwrk\" (UniqueName: \"kubernetes.io/projected/ba96c5a1-a23d-4616-b648-2644b8de5a32-kube-api-access-fcwrk\") pod \"ba96c5a1-a23d-4616-b648-2644b8de5a32\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") "
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.610901 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-nb\") pod \"ba96c5a1-a23d-4616-b648-2644b8de5a32\" (UID: \"ba96c5a1-a23d-4616-b648-2644b8de5a32\") "
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.627368 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba96c5a1-a23d-4616-b648-2644b8de5a32-kube-api-access-fcwrk" (OuterVolumeSpecName: "kube-api-access-fcwrk") pod "ba96c5a1-a23d-4616-b648-2644b8de5a32" (UID: "ba96c5a1-a23d-4616-b648-2644b8de5a32"). InnerVolumeSpecName "kube-api-access-fcwrk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.664491 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ba96c5a1-a23d-4616-b648-2644b8de5a32" (UID: "ba96c5a1-a23d-4616-b648-2644b8de5a32"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.669719 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ba96c5a1-a23d-4616-b648-2644b8de5a32" (UID: "ba96c5a1-a23d-4616-b648-2644b8de5a32"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.672804 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-config" (OuterVolumeSpecName: "config") pod "ba96c5a1-a23d-4616-b648-2644b8de5a32" (UID: "ba96c5a1-a23d-4616-b648-2644b8de5a32"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.679781 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ba96c5a1-a23d-4616-b648-2644b8de5a32" (UID: "ba96c5a1-a23d-4616-b648-2644b8de5a32"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.712511 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcwrk\" (UniqueName: \"kubernetes.io/projected/ba96c5a1-a23d-4616-b648-2644b8de5a32-kube-api-access-fcwrk\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.712554 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.712567 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.712576 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-config\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:04 crc kubenswrapper[4816]: I0216 14:35:04.712584 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba96c5a1-a23d-4616-b648-2644b8de5a32-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.028631 4816 generic.go:334] "Generic (PLEG): container finished" podID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerID="5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c" exitCode=0
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.028807 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" event={"ID":"ba96c5a1-a23d-4616-b648-2644b8de5a32","Type":"ContainerDied","Data":"5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c"}
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.028941 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx" event={"ID":"ba96c5a1-a23d-4616-b648-2644b8de5a32","Type":"ContainerDied","Data":"c205b2d064908847ef44f24fc5d39456934a0b1b9686881ea5c734f596e2ff65"}
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.028966 4816 scope.go:117] "RemoveContainer" containerID="5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c"
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.030096 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54fbc4c87-qldfx"
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.053525 4816 scope.go:117] "RemoveContainer" containerID="f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5"
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.062607 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54fbc4c87-qldfx"]
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.070828 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54fbc4c87-qldfx"]
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.195480 4816 scope.go:117] "RemoveContainer" containerID="5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c"
Feb 16 14:35:05 crc kubenswrapper[4816]: E0216 14:35:05.196647 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c\": container with ID starting with 5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c not found: ID does not exist" containerID="5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c"
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.196768 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c"} err="failed to get container status \"5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c\": rpc error: code = NotFound desc = could not find container \"5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c\": container with ID starting with 5182a84bd164424b03e48b20413fe21cd41627518346626eba68fc232d74058c not found: ID does not exist"
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.196825 4816 scope.go:117] "RemoveContainer" containerID="f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5"
Feb 16 14:35:05 crc kubenswrapper[4816]: E0216 14:35:05.197219 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5\": container with ID starting with f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5 not found: ID does not exist" containerID="f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5"
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.197262 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5"} err="failed to get container status \"f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5\": rpc error: code = NotFound desc = could not find container \"f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5\": container with ID starting with f05537a263a3b7a3eb3d718ca3276ed8626dedb575c2795fdae5d1bdb579abd5 not found: ID does not exist"
Feb 16 14:35:05 crc kubenswrapper[4816]: I0216 14:35:05.408796 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba96c5a1-a23d-4616-b648-2644b8de5a32" path="/var/lib/kubelet/pods/ba96c5a1-a23d-4616-b648-2644b8de5a32/volumes"
Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.084837 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-29hsq"]
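Note: the NotFound pairs above are benign. By the time the deletion path re-queried container status, CRI-O had already removed 5182a84bd164... and f05537a263a3..., and deleting something that is already gone is treated as success. A sketch of that idempotent-delete pattern against a gRPC-style runtime; removeContainer below is a hypothetical stub standing in for the RPC, not a CRI client method:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer stands in for a runtime RPC that may race with a prior
// delete and therefore report NotFound.
func removeContainer(id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

// removeIdempotent swallows NotFound: an already-removed container counts
// as a successful removal.
func removeIdempotent(id string) error {
	err := removeContainer(id)
	if status.Code(err) == codes.NotFound {
		return nil
	}
	return err
}

func main() {
	if err := removeIdempotent("5182a84bd164"); err != nil {
		fmt.Println("remove failed:", err)
		return
	}
	fmt.Println("container gone")
}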
"RemoveStaleState: removing container" podUID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerName="init" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.085268 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerName="init" Feb 16 14:35:06 crc kubenswrapper[4816]: E0216 14:35:06.085309 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerName="dnsmasq-dns" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.085316 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerName="dnsmasq-dns" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.085461 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba96c5a1-a23d-4616-b648-2644b8de5a32" containerName="dnsmasq-dns" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.086106 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.106848 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-d401-account-create-update-wfx9n"] Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.108063 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d401-account-create-update-wfx9n" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.111080 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.118564 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-29hsq"] Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.147452 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d401-account-create-update-wfx9n"] Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.180770 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f10b42-4a5e-4960-8890-1b623275accf-operator-scripts\") pod \"cinder-db-create-29hsq\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") " pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.180910 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc6q5\" (UniqueName: \"kubernetes.io/projected/65f10b42-4a5e-4960-8890-1b623275accf-kube-api-access-bc6q5\") pod \"cinder-db-create-29hsq\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") " pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.282083 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f10b42-4a5e-4960-8890-1b623275accf-operator-scripts\") pod \"cinder-db-create-29hsq\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") " pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.282213 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc6q5\" (UniqueName: \"kubernetes.io/projected/65f10b42-4a5e-4960-8890-1b623275accf-kube-api-access-bc6q5\") pod \"cinder-db-create-29hsq\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") " pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 
14:35:06.282251 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1390b28a-5190-432a-826c-66483fceb03c-operator-scripts\") pod \"cinder-d401-account-create-update-wfx9n\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") " pod="openstack/cinder-d401-account-create-update-wfx9n" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.282330 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpks4\" (UniqueName: \"kubernetes.io/projected/1390b28a-5190-432a-826c-66483fceb03c-kube-api-access-zpks4\") pod \"cinder-d401-account-create-update-wfx9n\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") " pod="openstack/cinder-d401-account-create-update-wfx9n" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.283220 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f10b42-4a5e-4960-8890-1b623275accf-operator-scripts\") pod \"cinder-db-create-29hsq\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") " pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.300531 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc6q5\" (UniqueName: \"kubernetes.io/projected/65f10b42-4a5e-4960-8890-1b623275accf-kube-api-access-bc6q5\") pod \"cinder-db-create-29hsq\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") " pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.384180 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1390b28a-5190-432a-826c-66483fceb03c-operator-scripts\") pod \"cinder-d401-account-create-update-wfx9n\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") " pod="openstack/cinder-d401-account-create-update-wfx9n" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.384487 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpks4\" (UniqueName: \"kubernetes.io/projected/1390b28a-5190-432a-826c-66483fceb03c-kube-api-access-zpks4\") pod \"cinder-d401-account-create-update-wfx9n\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") " pod="openstack/cinder-d401-account-create-update-wfx9n" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.384974 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1390b28a-5190-432a-826c-66483fceb03c-operator-scripts\") pod \"cinder-d401-account-create-update-wfx9n\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") " pod="openstack/cinder-d401-account-create-update-wfx9n" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.400119 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpks4\" (UniqueName: \"kubernetes.io/projected/1390b28a-5190-432a-826c-66483fceb03c-kube-api-access-zpks4\") pod \"cinder-d401-account-create-update-wfx9n\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") " pod="openstack/cinder-d401-account-create-update-wfx9n" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.405204 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29hsq" Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.431560 4816 util.go:30] "No sandbox for pod can be found. 
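Note: the reconciler_common.go lines here and throughout this log come from the kubelet's volume manager, which loops comparing a desired state (volumes the scheduled pods need) against an actual state (volumes currently set up), issuing VerifyControllerAttachedVolume/MountVolume on the way up and UnmountVolume/"Volume detached" on the way down. A toy diff in that spirit, using plain maps rather than the real volumemanager types:

package main

import "fmt"

// reconcile mounts anything desired but absent and unmounts anything
// present but no longer desired, mirroring the log's two directions.
func reconcile(desired, actual map[string]bool) {
	for v := range desired {
		if !actual[v] {
			fmt.Printf("MountVolume started for volume %q\n", v)
			actual[v] = true // pretend SetUp succeeded
		}
	}
	for v := range actual {
		if !desired[v] {
			fmt.Printf("UnmountVolume started for volume %q\n", v)
			delete(actual, v)
		}
	}
}

func main() {
	desired := map[string]bool{"operator-scripts": true, "kube-api-access-bc6q5": true}
	actual := map[string]bool{}
	reconcile(desired, actual) // mounts both, as in the cinder-db-create entries
	delete(desired, "operator-scripts")
	reconcile(desired, actual) // unmounts operator-scripts once no pod needs it
}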
Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.884474 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-29hsq"]
Feb 16 14:35:06 crc kubenswrapper[4816]: I0216 14:35:06.988883 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d401-account-create-update-wfx9n"]
Feb 16 14:35:06 crc kubenswrapper[4816]: W0216 14:35:06.991298 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1390b28a_5190_432a_826c_66483fceb03c.slice/crio-3cc8115748e61b9f89b8c20e7999e06abe5a61efe5bff26a575fabd386725718 WatchSource:0}: Error finding container 3cc8115748e61b9f89b8c20e7999e06abe5a61efe5bff26a575fabd386725718: Status 404 returned error can't find the container with id 3cc8115748e61b9f89b8c20e7999e06abe5a61efe5bff26a575fabd386725718
Feb 16 14:35:07 crc kubenswrapper[4816]: I0216 14:35:07.052278 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29hsq" event={"ID":"65f10b42-4a5e-4960-8890-1b623275accf","Type":"ContainerStarted","Data":"92cba9ff05ded6256e75aaec2bab4b6a51bec4abf5fbe9a3edc7f26be7141815"}
Feb 16 14:35:07 crc kubenswrapper[4816]: I0216 14:35:07.053718 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d401-account-create-update-wfx9n" event={"ID":"1390b28a-5190-432a-826c-66483fceb03c","Type":"ContainerStarted","Data":"3cc8115748e61b9f89b8c20e7999e06abe5a61efe5bff26a575fabd386725718"}
Feb 16 14:35:08 crc kubenswrapper[4816]: I0216 14:35:08.065941 4816 generic.go:334] "Generic (PLEG): container finished" podID="65f10b42-4a5e-4960-8890-1b623275accf" containerID="4965f5cca73baaeff91e862c10d664bc2ab212deaa5d4a969165258976ec3ca3" exitCode=0
Feb 16 14:35:08 crc kubenswrapper[4816]: I0216 14:35:08.066093 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29hsq" event={"ID":"65f10b42-4a5e-4960-8890-1b623275accf","Type":"ContainerDied","Data":"4965f5cca73baaeff91e862c10d664bc2ab212deaa5d4a969165258976ec3ca3"}
Feb 16 14:35:08 crc kubenswrapper[4816]: I0216 14:35:08.068702 4816 generic.go:334] "Generic (PLEG): container finished" podID="1390b28a-5190-432a-826c-66483fceb03c" containerID="9451f71bb2c41a91e3079046ffd44d9e7367e164e313fbe5e0aa1b062154868e" exitCode=0
Feb 16 14:35:08 crc kubenswrapper[4816]: I0216 14:35:08.068761 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d401-account-create-update-wfx9n" event={"ID":"1390b28a-5190-432a-826c-66483fceb03c","Type":"ContainerDied","Data":"9451f71bb2c41a91e3079046ffd44d9e7367e164e313fbe5e0aa1b062154868e"}
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.612996 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29hsq"
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.619186 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d401-account-create-update-wfx9n"
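Note: both one-shot cinder jobs above run a single container to completion (exitCode=0) before teardown begins; the manager.go 404 warning is just cAdvisor racing the short-lived cgroup and is harmless here. When tracing a single pod through a log like this one, filtering on the pod="..." field is usually sufficient; a small Go helper assuming nothing beyond the line format visible here:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// podField matches the structured pod="namespace/name" field that klog
// appends to most of the entries in this log.
var podField = regexp.MustCompile(`pod="([^"]+)"`)

func main() {
	target := "openstack/cinder-db-create-29hsq"
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines can be long
	for sc.Scan() {
		line := sc.Text()
		if m := podField.FindStringSubmatch(line); m != nil && m[1] == target {
			fmt.Println(line)
		}
	}
}

Run it as: go run filter.go < kubelet.log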
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.769199 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f10b42-4a5e-4960-8890-1b623275accf-operator-scripts\") pod \"65f10b42-4a5e-4960-8890-1b623275accf\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") "
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.769304 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1390b28a-5190-432a-826c-66483fceb03c-operator-scripts\") pod \"1390b28a-5190-432a-826c-66483fceb03c\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") "
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.769420 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bc6q5\" (UniqueName: \"kubernetes.io/projected/65f10b42-4a5e-4960-8890-1b623275accf-kube-api-access-bc6q5\") pod \"65f10b42-4a5e-4960-8890-1b623275accf\" (UID: \"65f10b42-4a5e-4960-8890-1b623275accf\") "
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.769472 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpks4\" (UniqueName: \"kubernetes.io/projected/1390b28a-5190-432a-826c-66483fceb03c-kube-api-access-zpks4\") pod \"1390b28a-5190-432a-826c-66483fceb03c\" (UID: \"1390b28a-5190-432a-826c-66483fceb03c\") "
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.770130 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65f10b42-4a5e-4960-8890-1b623275accf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "65f10b42-4a5e-4960-8890-1b623275accf" (UID: "65f10b42-4a5e-4960-8890-1b623275accf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.770421 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1390b28a-5190-432a-826c-66483fceb03c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1390b28a-5190-432a-826c-66483fceb03c" (UID: "1390b28a-5190-432a-826c-66483fceb03c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.775225 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1390b28a-5190-432a-826c-66483fceb03c-kube-api-access-zpks4" (OuterVolumeSpecName: "kube-api-access-zpks4") pod "1390b28a-5190-432a-826c-66483fceb03c" (UID: "1390b28a-5190-432a-826c-66483fceb03c"). InnerVolumeSpecName "kube-api-access-zpks4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.776450 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65f10b42-4a5e-4960-8890-1b623275accf-kube-api-access-bc6q5" (OuterVolumeSpecName: "kube-api-access-bc6q5") pod "65f10b42-4a5e-4960-8890-1b623275accf" (UID: "65f10b42-4a5e-4960-8890-1b623275accf"). InnerVolumeSpecName "kube-api-access-bc6q5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.871951 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f10b42-4a5e-4960-8890-1b623275accf-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.871991 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1390b28a-5190-432a-826c-66483fceb03c-operator-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.872011 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bc6q5\" (UniqueName: \"kubernetes.io/projected/65f10b42-4a5e-4960-8890-1b623275accf-kube-api-access-bc6q5\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:09 crc kubenswrapper[4816]: I0216 14:35:09.872023 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpks4\" (UniqueName: \"kubernetes.io/projected/1390b28a-5190-432a-826c-66483fceb03c-kube-api-access-zpks4\") on node \"crc\" DevicePath \"\""
Feb 16 14:35:10 crc kubenswrapper[4816]: I0216 14:35:10.172007 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29hsq"
Feb 16 14:35:10 crc kubenswrapper[4816]: I0216 14:35:10.172016 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29hsq" event={"ID":"65f10b42-4a5e-4960-8890-1b623275accf","Type":"ContainerDied","Data":"92cba9ff05ded6256e75aaec2bab4b6a51bec4abf5fbe9a3edc7f26be7141815"}
Feb 16 14:35:10 crc kubenswrapper[4816]: I0216 14:35:10.172521 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92cba9ff05ded6256e75aaec2bab4b6a51bec4abf5fbe9a3edc7f26be7141815"
Feb 16 14:35:10 crc kubenswrapper[4816]: I0216 14:35:10.173928 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d401-account-create-update-wfx9n" event={"ID":"1390b28a-5190-432a-826c-66483fceb03c","Type":"ContainerDied","Data":"3cc8115748e61b9f89b8c20e7999e06abe5a61efe5bff26a575fabd386725718"}
Feb 16 14:35:10 crc kubenswrapper[4816]: I0216 14:35:10.173997 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cc8115748e61b9f89b8c20e7999e06abe5a61efe5bff26a575fabd386725718"
Feb 16 14:35:10 crc kubenswrapper[4816]: I0216 14:35:10.173941 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d401-account-create-update-wfx9n"
Feb 16 14:35:10 crc kubenswrapper[4816]: I0216 14:35:10.399155 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"
Feb 16 14:35:10 crc kubenswrapper[4816]: E0216 14:35:10.399407 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.321962 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-mltdx"]
Feb 16 14:35:11 crc kubenswrapper[4816]: E0216 14:35:11.322401 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f10b42-4a5e-4960-8890-1b623275accf" containerName="mariadb-database-create"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.322420 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f10b42-4a5e-4960-8890-1b623275accf" containerName="mariadb-database-create"
Feb 16 14:35:11 crc kubenswrapper[4816]: E0216 14:35:11.322468 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1390b28a-5190-432a-826c-66483fceb03c" containerName="mariadb-account-create-update"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.322477 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1390b28a-5190-432a-826c-66483fceb03c" containerName="mariadb-account-create-update"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.322693 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f10b42-4a5e-4960-8890-1b623275accf" containerName="mariadb-database-create"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.322734 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1390b28a-5190-432a-826c-66483fceb03c" containerName="mariadb-account-create-update"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.323459 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mltdx"
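Note: the recurring machine-config-daemon entry is the kubelet's container restart back-off. Upstream Kubernetes documentation describes a delay that starts at 10s and doubles per failed restart up to a five-minute cap, which is the "back-off 5m0s" quoted in the message. A sketch of that schedule (the 10s base is the documented default, assumed here rather than read from this node's config):

package main

import (
	"fmt"
	"time"
)

// backoff returns the restart delay after n consecutive failures,
// doubling from the base and saturating at the five-minute cap.
func backoff(failures int) time.Duration {
	d := 10 * time.Second // assumed initial delay
	for i := 1; i < failures; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute // the cap quoted in the log
		}
	}
	return d
}

func main() {
	for n := 1; n <= 7; n++ {
		fmt.Printf("failure %d -> wait %v\n", n, backoff(n))
	}
}

Once a container has been failing long enough, as machine-config-daemon has here, every sync attempt within the 5m window is skipped with exactly this error.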
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.325443 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9ps45"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.326550 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.331778 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.332914 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mltdx"]
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.498668 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl5lr\" (UniqueName: \"kubernetes.io/projected/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-kube-api-access-zl5lr\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.498949 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-combined-ca-bundle\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.499157 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-db-sync-config-data\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.499271 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-config-data\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.499344 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-scripts\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.499417 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-etc-machine-id\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.601864 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-db-sync-config-data\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.601963 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-config-data\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.602060 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-scripts\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.602163 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-etc-machine-id\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.602351 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl5lr\" (UniqueName: \"kubernetes.io/projected/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-kube-api-access-zl5lr\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.602416 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-combined-ca-bundle\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.602543 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-etc-machine-id\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.606805 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-combined-ca-bundle\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.607288 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-db-sync-config-data\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.608224 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-config-data\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.611240 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-scripts\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.618355 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl5lr\" (UniqueName: \"kubernetes.io/projected/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-kube-api-access-zl5lr\") pod \"cinder-db-sync-mltdx\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:11 crc kubenswrapper[4816]: I0216 14:35:11.642123 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mltdx"
Feb 16 14:35:12 crc kubenswrapper[4816]: I0216 14:35:12.067845 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mltdx"]
Feb 16 14:35:12 crc kubenswrapper[4816]: I0216 14:35:12.201133 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mltdx" event={"ID":"11d5e04a-cb16-4c72-b4dc-0f00875aff0f","Type":"ContainerStarted","Data":"35db075ae0d135ce30f35a503e978812b6176cb26190c550c4822204ffb11951"}
Feb 16 14:35:13 crc kubenswrapper[4816]: I0216 14:35:13.211205 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mltdx" event={"ID":"11d5e04a-cb16-4c72-b4dc-0f00875aff0f","Type":"ContainerStarted","Data":"f84881e3e26461693a7c5e674f08b3d838351b759793b98bb6e3970c8b9ee75f"}
Feb 16 14:35:13 crc kubenswrapper[4816]: I0216 14:35:13.237252 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-mltdx" podStartSLOduration=2.23722771 podStartE2EDuration="2.23722771s" podCreationTimestamp="2026-02-16 14:35:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:13.230564089 +0000 UTC m=+5512.557277837" watchObservedRunningTime="2026-02-16 14:35:13.23722771 +0000 UTC m=+5512.563941448"
Feb 16 14:35:16 crc kubenswrapper[4816]: I0216 14:35:16.236295 4816 generic.go:334] "Generic (PLEG): container finished" podID="11d5e04a-cb16-4c72-b4dc-0f00875aff0f" containerID="f84881e3e26461693a7c5e674f08b3d838351b759793b98bb6e3970c8b9ee75f" exitCode=0
Feb 16 14:35:16 crc kubenswrapper[4816]: I0216 14:35:16.236377 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mltdx" event={"ID":"11d5e04a-cb16-4c72-b4dc-0f00875aff0f","Type":"ContainerDied","Data":"f84881e3e26461693a7c5e674f08b3d838351b759793b98bb6e3970c8b9ee75f"}
Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.639516 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mltdx"
Need to start a new one" pod="openstack/cinder-db-sync-mltdx" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.696666 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-config-data\") pod \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.697112 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-etc-machine-id\") pod \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.697212 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "11d5e04a-cb16-4c72-b4dc-0f00875aff0f" (UID: "11d5e04a-cb16-4c72-b4dc-0f00875aff0f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.697319 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-db-sync-config-data\") pod \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.697510 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl5lr\" (UniqueName: \"kubernetes.io/projected/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-kube-api-access-zl5lr\") pod \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.697680 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-scripts\") pod \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.697826 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-combined-ca-bundle\") pod \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\" (UID: \"11d5e04a-cb16-4c72-b4dc-0f00875aff0f\") " Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.698340 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.702178 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-scripts" (OuterVolumeSpecName: "scripts") pod "11d5e04a-cb16-4c72-b4dc-0f00875aff0f" (UID: "11d5e04a-cb16-4c72-b4dc-0f00875aff0f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.706379 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "11d5e04a-cb16-4c72-b4dc-0f00875aff0f" (UID: "11d5e04a-cb16-4c72-b4dc-0f00875aff0f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.708960 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-kube-api-access-zl5lr" (OuterVolumeSpecName: "kube-api-access-zl5lr") pod "11d5e04a-cb16-4c72-b4dc-0f00875aff0f" (UID: "11d5e04a-cb16-4c72-b4dc-0f00875aff0f"). InnerVolumeSpecName "kube-api-access-zl5lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.731743 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11d5e04a-cb16-4c72-b4dc-0f00875aff0f" (UID: "11d5e04a-cb16-4c72-b4dc-0f00875aff0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.758325 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-config-data" (OuterVolumeSpecName: "config-data") pod "11d5e04a-cb16-4c72-b4dc-0f00875aff0f" (UID: "11d5e04a-cb16-4c72-b4dc-0f00875aff0f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.800469 4816 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.800523 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl5lr\" (UniqueName: \"kubernetes.io/projected/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-kube-api-access-zl5lr\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.800540 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.800551 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:17 crc kubenswrapper[4816]: I0216 14:35:17.800562 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d5e04a-cb16-4c72-b4dc-0f00875aff0f-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.258565 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mltdx" event={"ID":"11d5e04a-cb16-4c72-b4dc-0f00875aff0f","Type":"ContainerDied","Data":"35db075ae0d135ce30f35a503e978812b6176cb26190c550c4822204ffb11951"} Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.258603 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mltdx" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.258608 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35db075ae0d135ce30f35a503e978812b6176cb26190c550c4822204ffb11951" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.597731 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6999d8bd47-28jrp"] Feb 16 14:35:18 crc kubenswrapper[4816]: E0216 14:35:18.604184 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d5e04a-cb16-4c72-b4dc-0f00875aff0f" containerName="cinder-db-sync" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.604286 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d5e04a-cb16-4c72-b4dc-0f00875aff0f" containerName="cinder-db-sync" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.604610 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d5e04a-cb16-4c72-b4dc-0f00875aff0f" containerName="cinder-db-sync" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.606453 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.614548 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-config\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.614891 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z659\" (UniqueName: \"kubernetes.io/projected/2d9ee62f-4dcf-447b-a68b-aed832204fc8-kube-api-access-5z659\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.615034 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-sb\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.615123 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-nb\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.615282 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-dns-svc\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.619491 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6999d8bd47-28jrp"] Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.715826 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-sb\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.715874 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-nb\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.715922 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-dns-svc\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.715975 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-config\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.716009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z659\" (UniqueName: \"kubernetes.io/projected/2d9ee62f-4dcf-447b-a68b-aed832204fc8-kube-api-access-5z659\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.716729 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-sb\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.717258 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-config\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.720807 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-nb\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.721915 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-dns-svc\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.739965 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z659\" (UniqueName: \"kubernetes.io/projected/2d9ee62f-4dcf-447b-a68b-aed832204fc8-kube-api-access-5z659\") pod \"dnsmasq-dns-6999d8bd47-28jrp\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.789698 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.791644 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.793729 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.793729 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.793966 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.799073 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9ps45" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.806616 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.918957 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvfml\" (UniqueName: \"kubernetes.io/projected/061f267d-ec5e-4687-8d47-0f60a3a18f07-kube-api-access-wvfml\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.919098 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data-custom\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.919196 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/061f267d-ec5e-4687-8d47-0f60a3a18f07-etc-machine-id\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.919256 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.919286 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.919338 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061f267d-ec5e-4687-8d47-0f60a3a18f07-logs\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.919363 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-scripts\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:18 crc kubenswrapper[4816]: I0216 14:35:18.927707 4816 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021301 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/061f267d-ec5e-4687-8d47-0f60a3a18f07-etc-machine-id\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021394 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/061f267d-ec5e-4687-8d47-0f60a3a18f07-etc-machine-id\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021722 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021764 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021803 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061f267d-ec5e-4687-8d47-0f60a3a18f07-logs\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021833 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-scripts\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021880 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvfml\" (UniqueName: \"kubernetes.io/projected/061f267d-ec5e-4687-8d47-0f60a3a18f07-kube-api-access-wvfml\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.021960 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data-custom\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.022436 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061f267d-ec5e-4687-8d47-0f60a3a18f07-logs\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.029084 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.029601 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-scripts\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.030302 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.042676 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvfml\" (UniqueName: \"kubernetes.io/projected/061f267d-ec5e-4687-8d47-0f60a3a18f07-kube-api-access-wvfml\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.059004 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data\") pod \"cinder-api-0\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.111982 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.444702 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6999d8bd47-28jrp"] Feb 16 14:35:19 crc kubenswrapper[4816]: I0216 14:35:19.638530 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:19 crc kubenswrapper[4816]: W0216 14:35:19.639478 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod061f267d_ec5e_4687_8d47_0f60a3a18f07.slice/crio-4955632fc9905cf3c687525a36aa57a57776ee74b7bb920c74802544b1612683 WatchSource:0}: Error finding container 4955632fc9905cf3c687525a36aa57a57776ee74b7bb920c74802544b1612683: Status 404 returned error can't find the container with id 4955632fc9905cf3c687525a36aa57a57776ee74b7bb920c74802544b1612683 Feb 16 14:35:20 crc kubenswrapper[4816]: I0216 14:35:20.309494 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"061f267d-ec5e-4687-8d47-0f60a3a18f07","Type":"ContainerStarted","Data":"e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64"} Feb 16 14:35:20 crc kubenswrapper[4816]: I0216 14:35:20.309876 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"061f267d-ec5e-4687-8d47-0f60a3a18f07","Type":"ContainerStarted","Data":"4955632fc9905cf3c687525a36aa57a57776ee74b7bb920c74802544b1612683"} Feb 16 14:35:20 crc kubenswrapper[4816]: I0216 14:35:20.319083 4816 generic.go:334] "Generic (PLEG): container finished" podID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerID="d5dda9dea26d670e878147505fe29f79d11ce51b98e636e7a59cdf39e47a38fe" exitCode=0 Feb 16 14:35:20 crc kubenswrapper[4816]: I0216 14:35:20.319143 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" 
event={"ID":"2d9ee62f-4dcf-447b-a68b-aed832204fc8","Type":"ContainerDied","Data":"d5dda9dea26d670e878147505fe29f79d11ce51b98e636e7a59cdf39e47a38fe"} Feb 16 14:35:20 crc kubenswrapper[4816]: I0216 14:35:20.319177 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" event={"ID":"2d9ee62f-4dcf-447b-a68b-aed832204fc8","Type":"ContainerStarted","Data":"c6f93af4dcb299d9f8c383799c51baf26f6885ff41ec451f941fbc9290d330e1"} Feb 16 14:35:21 crc kubenswrapper[4816]: I0216 14:35:21.329081 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"061f267d-ec5e-4687-8d47-0f60a3a18f07","Type":"ContainerStarted","Data":"e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d"} Feb 16 14:35:21 crc kubenswrapper[4816]: I0216 14:35:21.329447 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 16 14:35:21 crc kubenswrapper[4816]: I0216 14:35:21.332301 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" event={"ID":"2d9ee62f-4dcf-447b-a68b-aed832204fc8","Type":"ContainerStarted","Data":"6e52092c597c2064da5d45b464268075c8a0d67e2367adec76a505262da0471b"} Feb 16 14:35:21 crc kubenswrapper[4816]: I0216 14:35:21.332536 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:21 crc kubenswrapper[4816]: I0216 14:35:21.349073 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.349058107 podStartE2EDuration="3.349058107s" podCreationTimestamp="2026-02-16 14:35:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:21.344743299 +0000 UTC m=+5520.671457037" watchObservedRunningTime="2026-02-16 14:35:21.349058107 +0000 UTC m=+5520.675771835" Feb 16 14:35:22 crc kubenswrapper[4816]: I0216 14:35:22.398733 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:35:22 crc kubenswrapper[4816]: E0216 14:35:22.399288 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:35:28 crc kubenswrapper[4816]: I0216 14:35:28.930011 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:35:28 crc kubenswrapper[4816]: I0216 14:35:28.958610 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" podStartSLOduration=10.958585542 podStartE2EDuration="10.958585542s" podCreationTimestamp="2026-02-16 14:35:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:21.371970872 +0000 UTC m=+5520.698684610" watchObservedRunningTime="2026-02-16 14:35:28.958585542 +0000 UTC m=+5528.285299270" Feb 16 14:35:28 crc kubenswrapper[4816]: I0216 14:35:28.999432 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-5d64bcf49c-7phcm"] Feb 16 14:35:28 crc kubenswrapper[4816]: I0216 14:35:28.999758 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" podUID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerName="dnsmasq-dns" containerID="cri-o://a7177308cc69e28111e9601594a778f6b058bb396e0dd835054fe8149fb87c43" gracePeriod=10 Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.430508 4816 generic.go:334] "Generic (PLEG): container finished" podID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerID="a7177308cc69e28111e9601594a778f6b058bb396e0dd835054fe8149fb87c43" exitCode=0 Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.430557 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" event={"ID":"00fcddf9-4f69-4c30-982b-26a3af292e61","Type":"ContainerDied","Data":"a7177308cc69e28111e9601594a778f6b058bb396e0dd835054fe8149fb87c43"} Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.603753 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.798007 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-sb\") pod \"00fcddf9-4f69-4c30-982b-26a3af292e61\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.798366 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-dns-svc\") pod \"00fcddf9-4f69-4c30-982b-26a3af292e61\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.798482 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-config\") pod \"00fcddf9-4f69-4c30-982b-26a3af292e61\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.798711 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-nb\") pod \"00fcddf9-4f69-4c30-982b-26a3af292e61\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.798813 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b5zk\" (UniqueName: \"kubernetes.io/projected/00fcddf9-4f69-4c30-982b-26a3af292e61-kube-api-access-6b5zk\") pod \"00fcddf9-4f69-4c30-982b-26a3af292e61\" (UID: \"00fcddf9-4f69-4c30-982b-26a3af292e61\") " Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.825973 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00fcddf9-4f69-4c30-982b-26a3af292e61-kube-api-access-6b5zk" (OuterVolumeSpecName: "kube-api-access-6b5zk") pod "00fcddf9-4f69-4c30-982b-26a3af292e61" (UID: "00fcddf9-4f69-4c30-982b-26a3af292e61"). InnerVolumeSpecName "kube-api-access-6b5zk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.884778 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "00fcddf9-4f69-4c30-982b-26a3af292e61" (UID: "00fcddf9-4f69-4c30-982b-26a3af292e61"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.888292 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "00fcddf9-4f69-4c30-982b-26a3af292e61" (UID: "00fcddf9-4f69-4c30-982b-26a3af292e61"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.899154 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "00fcddf9-4f69-4c30-982b-26a3af292e61" (UID: "00fcddf9-4f69-4c30-982b-26a3af292e61"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.901205 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.901234 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b5zk\" (UniqueName: \"kubernetes.io/projected/00fcddf9-4f69-4c30-982b-26a3af292e61-kube-api-access-6b5zk\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.901247 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.901259 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:29 crc kubenswrapper[4816]: I0216 14:35:29.913614 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-config" (OuterVolumeSpecName: "config") pod "00fcddf9-4f69-4c30-982b-26a3af292e61" (UID: "00fcddf9-4f69-4c30-982b-26a3af292e61"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.002751 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00fcddf9-4f69-4c30-982b-26a3af292e61-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.439946 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" event={"ID":"00fcddf9-4f69-4c30-982b-26a3af292e61","Type":"ContainerDied","Data":"496da3012b9a650474698887cc9e8cda422844c28074b432c7bb94ac29d8fae5"} Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.439975 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d64bcf49c-7phcm" Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.440004 4816 scope.go:117] "RemoveContainer" containerID="a7177308cc69e28111e9601594a778f6b058bb396e0dd835054fe8149fb87c43" Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.478357 4816 scope.go:117] "RemoveContainer" containerID="99412c21132d0fd621e70bfa2c992002ca26dfda2a279e982329f69d1eb84c09" Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.480667 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d64bcf49c-7phcm"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.525054 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d64bcf49c-7phcm"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.812526 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.812861 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-log" containerID="cri-o://83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c" gracePeriod=30 Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.812998 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-metadata" containerID="cri-o://f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9" gracePeriod=30 Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.834289 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.834730 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8278cce4-3101-4681-b59b-8a597a462f78" containerName="nova-scheduler-scheduler" containerID="cri-o://f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37" gracePeriod=30 Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.851491 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.851788 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-log" containerID="cri-o://4d5b1ee8e39858dce810c13dde473b4519f86758c7c2e522083e7811a6a5d893" gracePeriod=30 Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.851814 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-api" containerID="cri-o://d1e5505c71cc4204e408a903b5c207ad34a0a8329930dd2ef11a033b9cf5dbb2" gracePeriod=30 Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.863700 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.863921 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="7366b149-cbc0-40c5-a9aa-753f21d7c971" containerName="nova-cell0-conductor-conductor" containerID="cri-o://339b1c7edb36252ad9dec7e9db79707e345d7badab3b696c046f2351f0a67cf6" gracePeriod=30 Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.875955 
4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.876173 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="efedca8a-1bb7-4b68-8368-2c69fba8c489" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d" gracePeriod=30 Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.936647 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 14:35:30 crc kubenswrapper[4816]: I0216 14:35:30.936924 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="7b2ba465-5c2c-460b-8656-edc33f2015b1" containerName="nova-cell1-conductor-conductor" containerID="cri-o://6177b192061a23f28c87c9be802fd4cc201dbedbba40a631fd4503266ce7bc45" gracePeriod=30 Feb 16 14:35:31 crc kubenswrapper[4816]: I0216 14:35:31.412320 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00fcddf9-4f69-4c30-982b-26a3af292e61" path="/var/lib/kubelet/pods/00fcddf9-4f69-4c30-982b-26a3af292e61/volumes" Feb 16 14:35:31 crc kubenswrapper[4816]: I0216 14:35:31.451443 4816 generic.go:334] "Generic (PLEG): container finished" podID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerID="4d5b1ee8e39858dce810c13dde473b4519f86758c7c2e522083e7811a6a5d893" exitCode=143 Feb 16 14:35:31 crc kubenswrapper[4816]: I0216 14:35:31.451524 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c1b4217-ef70-4aa2-880b-b92e1d536ad8","Type":"ContainerDied","Data":"4d5b1ee8e39858dce810c13dde473b4519f86758c7c2e522083e7811a6a5d893"} Feb 16 14:35:31 crc kubenswrapper[4816]: I0216 14:35:31.453689 4816 generic.go:334] "Generic (PLEG): container finished" podID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerID="83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c" exitCode=143 Feb 16 14:35:31 crc kubenswrapper[4816]: I0216 14:35:31.453721 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e32b2ea-15ec-43fd-bfe6-259c75809b4a","Type":"ContainerDied","Data":"83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c"} Feb 16 14:35:31 crc kubenswrapper[4816]: I0216 14:35:31.520705 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 16 14:35:31 crc kubenswrapper[4816]: I0216 14:35:31.919251 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.027169 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-config-data\") pod \"efedca8a-1bb7-4b68-8368-2c69fba8c489\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.027277 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-combined-ca-bundle\") pod \"efedca8a-1bb7-4b68-8368-2c69fba8c489\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.027309 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qw7t\" (UniqueName: \"kubernetes.io/projected/efedca8a-1bb7-4b68-8368-2c69fba8c489-kube-api-access-7qw7t\") pod \"efedca8a-1bb7-4b68-8368-2c69fba8c489\" (UID: \"efedca8a-1bb7-4b68-8368-2c69fba8c489\") " Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.083452 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efedca8a-1bb7-4b68-8368-2c69fba8c489-kube-api-access-7qw7t" (OuterVolumeSpecName: "kube-api-access-7qw7t") pod "efedca8a-1bb7-4b68-8368-2c69fba8c489" (UID: "efedca8a-1bb7-4b68-8368-2c69fba8c489"). InnerVolumeSpecName "kube-api-access-7qw7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.101447 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "efedca8a-1bb7-4b68-8368-2c69fba8c489" (UID: "efedca8a-1bb7-4b68-8368-2c69fba8c489"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.123550 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-config-data" (OuterVolumeSpecName: "config-data") pod "efedca8a-1bb7-4b68-8368-2c69fba8c489" (UID: "efedca8a-1bb7-4b68-8368-2c69fba8c489"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.131342 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.131378 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efedca8a-1bb7-4b68-8368-2c69fba8c489-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.131391 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qw7t\" (UniqueName: \"kubernetes.io/projected/efedca8a-1bb7-4b68-8368-2c69fba8c489-kube-api-access-7qw7t\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.303540 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.336114 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzv96\" (UniqueName: \"kubernetes.io/projected/8278cce4-3101-4681-b59b-8a597a462f78-kube-api-access-zzv96\") pod \"8278cce4-3101-4681-b59b-8a597a462f78\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.344905 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8278cce4-3101-4681-b59b-8a597a462f78-kube-api-access-zzv96" (OuterVolumeSpecName: "kube-api-access-zzv96") pod "8278cce4-3101-4681-b59b-8a597a462f78" (UID: "8278cce4-3101-4681-b59b-8a597a462f78"). InnerVolumeSpecName "kube-api-access-zzv96". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.436923 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-config-data\") pod \"8278cce4-3101-4681-b59b-8a597a462f78\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.437578 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-combined-ca-bundle\") pod \"8278cce4-3101-4681-b59b-8a597a462f78\" (UID: \"8278cce4-3101-4681-b59b-8a597a462f78\") " Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.438054 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzv96\" (UniqueName: \"kubernetes.io/projected/8278cce4-3101-4681-b59b-8a597a462f78-kube-api-access-zzv96\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.462527 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-config-data" (OuterVolumeSpecName: "config-data") pod "8278cce4-3101-4681-b59b-8a597a462f78" (UID: "8278cce4-3101-4681-b59b-8a597a462f78"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.464368 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8278cce4-3101-4681-b59b-8a597a462f78" (UID: "8278cce4-3101-4681-b59b-8a597a462f78"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.468416 4816 generic.go:334] "Generic (PLEG): container finished" podID="8278cce4-3101-4681-b59b-8a597a462f78" containerID="f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37" exitCode=0 Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.468465 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8278cce4-3101-4681-b59b-8a597a462f78","Type":"ContainerDied","Data":"f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37"} Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.468501 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8278cce4-3101-4681-b59b-8a597a462f78","Type":"ContainerDied","Data":"700f4bf0171aebffad57df89360d7f8087e1db8bf53ef590658ecfe65f09f335"} Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.468519 4816 scope.go:117] "RemoveContainer" containerID="f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.468629 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.473215 4816 generic.go:334] "Generic (PLEG): container finished" podID="efedca8a-1bb7-4b68-8368-2c69fba8c489" containerID="b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d" exitCode=0 Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.473244 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"efedca8a-1bb7-4b68-8368-2c69fba8c489","Type":"ContainerDied","Data":"b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d"} Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.473262 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"efedca8a-1bb7-4b68-8368-2c69fba8c489","Type":"ContainerDied","Data":"2503654d1dbd6c1041b6b58cec9672cbfb7c7516a7c93592a129993395e6da9e"} Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.473299 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.506761 4816 scope.go:117] "RemoveContainer" containerID="f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37" Feb 16 14:35:32 crc kubenswrapper[4816]: E0216 14:35:32.507737 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37\": container with ID starting with f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37 not found: ID does not exist" containerID="f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.507791 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37"} err="failed to get container status \"f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37\": rpc error: code = NotFound desc = could not find container \"f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37\": container with ID starting with f632a57a25c3abadedfcad48a9a54d834634b1c06d99b0094fa84d0ced9e1b37 not found: ID does not exist" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.507819 4816 scope.go:117] "RemoveContainer" containerID="b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.523727 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.539564 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.539601 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8278cce4-3101-4681-b59b-8a597a462f78-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.539972 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.555128 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.567973 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.576683 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.596858 4816 scope.go:117] "RemoveContainer" containerID="b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d" Feb 16 14:35:32 crc kubenswrapper[4816]: E0216 14:35:32.597291 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d\": container with ID starting with b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d not found: ID does not exist" containerID="b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.597330 4816 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d"} err="failed to get container status \"b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d\": rpc error: code = NotFound desc = could not find container \"b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d\": container with ID starting with b19121624dbcb8b8b9437e28ad93056d7a022a38c74272529819bc0432a8d14d not found: ID does not exist" Feb 16 14:35:32 crc kubenswrapper[4816]: E0216 14:35:32.597543 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efedca8a-1bb7-4b68-8368-2c69fba8c489" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.597570 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="efedca8a-1bb7-4b68-8368-2c69fba8c489" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 14:35:32 crc kubenswrapper[4816]: E0216 14:35:32.597586 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8278cce4-3101-4681-b59b-8a597a462f78" containerName="nova-scheduler-scheduler" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.597593 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8278cce4-3101-4681-b59b-8a597a462f78" containerName="nova-scheduler-scheduler" Feb 16 14:35:32 crc kubenswrapper[4816]: E0216 14:35:32.597609 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerName="init" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.597615 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerName="init" Feb 16 14:35:32 crc kubenswrapper[4816]: E0216 14:35:32.597645 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerName="dnsmasq-dns" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.600417 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerName="dnsmasq-dns" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.600609 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8278cce4-3101-4681-b59b-8a597a462f78" containerName="nova-scheduler-scheduler" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.600628 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="efedca8a-1bb7-4b68-8368-2c69fba8c489" containerName="nova-cell1-novncproxy-novncproxy" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.600644 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="00fcddf9-4f69-4c30-982b-26a3af292e61" containerName="dnsmasq-dns" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.601216 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.601962 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.602042 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.602500 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.605513 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.605713 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.627901 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.744963 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.745111 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjq6w\" (UniqueName: \"kubernetes.io/projected/a918762a-682a-4191-afeb-8a5b2de9de86-kube-api-access-rjq6w\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.745173 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.745316 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjtlm\" (UniqueName: \"kubernetes.io/projected/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-kube-api-access-qjtlm\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.745407 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.745465 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-config-data\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.884586 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.884718 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjq6w\" (UniqueName: 
\"kubernetes.io/projected/a918762a-682a-4191-afeb-8a5b2de9de86-kube-api-access-rjq6w\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.884775 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.884810 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjtlm\" (UniqueName: \"kubernetes.io/projected/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-kube-api-access-qjtlm\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.884842 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.884877 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-config-data\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.895953 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.896862 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.900616 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.902458 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-config-data\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.905380 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjq6w\" (UniqueName: \"kubernetes.io/projected/a918762a-682a-4191-afeb-8a5b2de9de86-kube-api-access-rjq6w\") pod \"nova-scheduler-0\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc 
kubenswrapper[4816]: I0216 14:35:32.920806 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjtlm\" (UniqueName: \"kubernetes.io/projected/1e9fd6ce-2013-406b-a1a9-b9c948f0dca7-kube-api-access-qjtlm\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7\") " pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.948292 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 14:35:32 crc kubenswrapper[4816]: I0216 14:35:32.968617 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.422824 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8278cce4-3101-4681-b59b-8a597a462f78" path="/var/lib/kubelet/pods/8278cce4-3101-4681-b59b-8a597a462f78/volumes" Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.424218 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efedca8a-1bb7-4b68-8368-2c69fba8c489" path="/var/lib/kubelet/pods/efedca8a-1bb7-4b68-8368-2c69fba8c489/volumes" Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.483205 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.510013 4816 generic.go:334] "Generic (PLEG): container finished" podID="7366b149-cbc0-40c5-a9aa-753f21d7c971" containerID="339b1c7edb36252ad9dec7e9db79707e345d7badab3b696c046f2351f0a67cf6" exitCode=0 Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.510067 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7366b149-cbc0-40c5-a9aa-753f21d7c971","Type":"ContainerDied","Data":"339b1c7edb36252ad9dec7e9db79707e345d7badab3b696c046f2351f0a67cf6"} Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.511152 4816 generic.go:334] "Generic (PLEG): container finished" podID="7b2ba465-5c2c-460b-8656-edc33f2015b1" containerID="6177b192061a23f28c87c9be802fd4cc201dbedbba40a631fd4503266ce7bc45" exitCode=0 Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.511180 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b2ba465-5c2c-460b-8656-edc33f2015b1","Type":"ContainerDied","Data":"6177b192061a23f28c87c9be802fd4cc201dbedbba40a631fd4503266ce7bc45"} Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.598753 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.818799 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.854105 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.982402 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.70:8775/\": read tcp 10.217.0.2:33798->10.217.1.70:8775: read: connection reset by peer" Feb 16 14:35:33 crc kubenswrapper[4816]: I0216 14:35:33.982640 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.70:8775/\": read tcp 10.217.0.2:33796->10.217.1.70:8775: read: connection reset by peer" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.023115 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-combined-ca-bundle\") pod \"7366b149-cbc0-40c5-a9aa-753f21d7c971\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.023230 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-combined-ca-bundle\") pod \"7b2ba465-5c2c-460b-8656-edc33f2015b1\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.023351 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv8pq\" (UniqueName: \"kubernetes.io/projected/7366b149-cbc0-40c5-a9aa-753f21d7c971-kube-api-access-vv8pq\") pod \"7366b149-cbc0-40c5-a9aa-753f21d7c971\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.023428 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-config-data\") pod \"7366b149-cbc0-40c5-a9aa-753f21d7c971\" (UID: \"7366b149-cbc0-40c5-a9aa-753f21d7c971\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.023450 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-config-data\") pod \"7b2ba465-5c2c-460b-8656-edc33f2015b1\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.023480 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g5wb\" (UniqueName: \"kubernetes.io/projected/7b2ba465-5c2c-460b-8656-edc33f2015b1-kube-api-access-6g5wb\") pod \"7b2ba465-5c2c-460b-8656-edc33f2015b1\" (UID: \"7b2ba465-5c2c-460b-8656-edc33f2015b1\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.027834 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7366b149-cbc0-40c5-a9aa-753f21d7c971-kube-api-access-vv8pq" (OuterVolumeSpecName: "kube-api-access-vv8pq") pod "7366b149-cbc0-40c5-a9aa-753f21d7c971" (UID: "7366b149-cbc0-40c5-a9aa-753f21d7c971"). InnerVolumeSpecName "kube-api-access-vv8pq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.029102 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b2ba465-5c2c-460b-8656-edc33f2015b1-kube-api-access-6g5wb" (OuterVolumeSpecName: "kube-api-access-6g5wb") pod "7b2ba465-5c2c-460b-8656-edc33f2015b1" (UID: "7b2ba465-5c2c-460b-8656-edc33f2015b1"). InnerVolumeSpecName "kube-api-access-6g5wb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.038818 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.71:8774/\": read tcp 10.217.0.2:52434->10.217.1.71:8774: read: connection reset by peer" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.039738 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-api-0" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.71:8774/\": read tcp 10.217.0.2:52426->10.217.1.71:8774: read: connection reset by peer" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.074528 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7366b149-cbc0-40c5-a9aa-753f21d7c971" (UID: "7366b149-cbc0-40c5-a9aa-753f21d7c971"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.074559 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-config-data" (OuterVolumeSpecName: "config-data") pod "7b2ba465-5c2c-460b-8656-edc33f2015b1" (UID: "7b2ba465-5c2c-460b-8656-edc33f2015b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.085116 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b2ba465-5c2c-460b-8656-edc33f2015b1" (UID: "7b2ba465-5c2c-460b-8656-edc33f2015b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.085296 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-config-data" (OuterVolumeSpecName: "config-data") pod "7366b149-cbc0-40c5-a9aa-753f21d7c971" (UID: "7366b149-cbc0-40c5-a9aa-753f21d7c971"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.126230 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.126281 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv8pq\" (UniqueName: \"kubernetes.io/projected/7366b149-cbc0-40c5-a9aa-753f21d7c971-kube-api-access-vv8pq\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.126300 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.126311 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b2ba465-5c2c-460b-8656-edc33f2015b1-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.126326 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g5wb\" (UniqueName: \"kubernetes.io/projected/7b2ba465-5c2c-460b-8656-edc33f2015b1-kube-api-access-6g5wb\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.126339 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7366b149-cbc0-40c5-a9aa-753f21d7c971-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.442069 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.538506 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-config-data\") pod \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.538627 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8qm9\" (UniqueName: \"kubernetes.io/projected/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-kube-api-access-k8qm9\") pod \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.538815 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-logs\") pod \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.538861 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-combined-ca-bundle\") pod \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\" (UID: \"5e32b2ea-15ec-43fd-bfe6-259c75809b4a\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.541751 4816 generic.go:334] "Generic (PLEG): container finished" podID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerID="d1e5505c71cc4204e408a903b5c207ad34a0a8329930dd2ef11a033b9cf5dbb2" exitCode=0 Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.541890 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c1b4217-ef70-4aa2-880b-b92e1d536ad8","Type":"ContainerDied","Data":"d1e5505c71cc4204e408a903b5c207ad34a0a8329930dd2ef11a033b9cf5dbb2"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.541925 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7c1b4217-ef70-4aa2-880b-b92e1d536ad8","Type":"ContainerDied","Data":"dd31fb64e1a4f602efd2db35f9adf06945048651dbbf6cbd53d96107ebd82a8b"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.541940 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd31fb64e1a4f602efd2db35f9adf06945048651dbbf6cbd53d96107ebd82a8b" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.542735 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-kube-api-access-k8qm9" (OuterVolumeSpecName: "kube-api-access-k8qm9") pod "5e32b2ea-15ec-43fd-bfe6-259c75809b4a" (UID: "5e32b2ea-15ec-43fd-bfe6-259c75809b4a"). InnerVolumeSpecName "kube-api-access-k8qm9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.544628 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7366b149-cbc0-40c5-a9aa-753f21d7c971","Type":"ContainerDied","Data":"6e70c03c28c88c2996b5642e8f4594559170c0668ce53c4872a59140d3196a00"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.544725 4816 scope.go:117] "RemoveContainer" containerID="339b1c7edb36252ad9dec7e9db79707e345d7badab3b696c046f2351f0a67cf6" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.545084 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.547286 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-logs" (OuterVolumeSpecName: "logs") pod "5e32b2ea-15ec-43fd-bfe6-259c75809b4a" (UID: "5e32b2ea-15ec-43fd-bfe6-259c75809b4a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.591949 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7","Type":"ContainerStarted","Data":"933d4b43f808a53647ab561b158f65b2d06a6e4b506fcc9b80c008056f2c4f23"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.592000 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e9fd6ce-2013-406b-a1a9-b9c948f0dca7","Type":"ContainerStarted","Data":"e05eb52c72cf2dd73b431ff2c0c1ef10e70077e4710dcc5c3fd0de40c876eca5"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.592868 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e32b2ea-15ec-43fd-bfe6-259c75809b4a" (UID: "5e32b2ea-15ec-43fd-bfe6-259c75809b4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.596887 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-config-data" (OuterVolumeSpecName: "config-data") pod "5e32b2ea-15ec-43fd-bfe6-259c75809b4a" (UID: "5e32b2ea-15ec-43fd-bfe6-259c75809b4a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.613002 4816 generic.go:334] "Generic (PLEG): container finished" podID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerID="f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9" exitCode=0 Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.613120 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e32b2ea-15ec-43fd-bfe6-259c75809b4a","Type":"ContainerDied","Data":"f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.613173 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e32b2ea-15ec-43fd-bfe6-259c75809b4a","Type":"ContainerDied","Data":"ff111193962edced918e488cc2742ba209e65ee40305987364099d18eb754f92"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.613285 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.634011 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b2ba465-5c2c-460b-8656-edc33f2015b1","Type":"ContainerDied","Data":"df443f2afe33ed1b3bf2d7675e96289c990204a05c29e41ee1ff1d3229ca0216"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.634127 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.635158 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.635139064 podStartE2EDuration="2.635139064s" podCreationTimestamp="2026-02-16 14:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:34.6239964 +0000 UTC m=+5533.950710138" watchObservedRunningTime="2026-02-16 14:35:34.635139064 +0000 UTC m=+5533.961852792" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.643444 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.643490 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.643507 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.643521 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8qm9\" (UniqueName: \"kubernetes.io/projected/5e32b2ea-15ec-43fd-bfe6-259c75809b4a-kube-api-access-k8qm9\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.645819 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a918762a-682a-4191-afeb-8a5b2de9de86","Type":"ContainerStarted","Data":"f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 
14:35:34.645870 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a918762a-682a-4191-afeb-8a5b2de9de86","Type":"ContainerStarted","Data":"523029ffb14834e23b2f1c180ca83aec8abdcfaf61de7eca006d69f5dc143745"} Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.697178 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.697159306 podStartE2EDuration="2.697159306s" podCreationTimestamp="2026-02-16 14:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:34.664106154 +0000 UTC m=+5533.990819882" watchObservedRunningTime="2026-02-16 14:35:34.697159306 +0000 UTC m=+5534.023873024" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.702418 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.709968 4816 scope.go:117] "RemoveContainer" containerID="f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.850161 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.881323 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b2ba465_5c2c_460b_8656_edc33f2015b1.slice/crio-df443f2afe33ed1b3bf2d7675e96289c990204a05c29e41ee1ff1d3229ca0216\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b2ba465_5c2c_460b_8656_edc33f2015b1.slice\": RecentStats: unable to find data in memory cache]" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.883927 4816 scope.go:117] "RemoveContainer" containerID="83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.941333 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-logs\") pod \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.941451 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-combined-ca-bundle\") pod \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.941529 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxs8t\" (UniqueName: \"kubernetes.io/projected/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-kube-api-access-zxs8t\") pod \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.941684 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-config-data\") pod \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\" (UID: \"7c1b4217-ef70-4aa2-880b-b92e1d536ad8\") " Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.952201 4816 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-logs" (OuterVolumeSpecName: "logs") pod "7c1b4217-ef70-4aa2-880b-b92e1d536ad8" (UID: "7c1b4217-ef70-4aa2-880b-b92e1d536ad8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.954377 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.955579 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-kube-api-access-zxs8t" (OuterVolumeSpecName: "kube-api-access-zxs8t") pod "7c1b4217-ef70-4aa2-880b-b92e1d536ad8" (UID: "7c1b4217-ef70-4aa2-880b-b92e1d536ad8"). InnerVolumeSpecName "kube-api-access-zxs8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.968573 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.969248 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-log" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969266 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-log" Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.969281 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-log" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969287 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-log" Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.969335 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7366b149-cbc0-40c5-a9aa-753f21d7c971" containerName="nova-cell0-conductor-conductor" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969343 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7366b149-cbc0-40c5-a9aa-753f21d7c971" containerName="nova-cell0-conductor-conductor" Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.969355 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2ba465-5c2c-460b-8656-edc33f2015b1" containerName="nova-cell1-conductor-conductor" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969361 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2ba465-5c2c-460b-8656-edc33f2015b1" containerName="nova-cell1-conductor-conductor" Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.969377 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-api" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969408 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-api" Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.969421 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-metadata" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969427 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" 
containerName="nova-metadata-metadata" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969687 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-metadata" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969735 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-log" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969747 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" containerName="nova-api-api" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969759 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7366b149-cbc0-40c5-a9aa-753f21d7c971" containerName="nova-cell0-conductor-conductor" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969766 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" containerName="nova-metadata-log" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.969783 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b2ba465-5c2c-460b-8656-edc33f2015b1" containerName="nova-cell1-conductor-conductor" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.970868 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.970869 4816 scope.go:117] "RemoveContainer" containerID="f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9" Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.971328 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9\": container with ID starting with f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9 not found: ID does not exist" containerID="f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.971365 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9"} err="failed to get container status \"f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9\": rpc error: code = NotFound desc = could not find container \"f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9\": container with ID starting with f220e3c66df7f690ae3ede29dffe4572ed333472e4b3c9844b5a7887410e28d9 not found: ID does not exist" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.971387 4816 scope.go:117] "RemoveContainer" containerID="83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c" Feb 16 14:35:34 crc kubenswrapper[4816]: E0216 14:35:34.971946 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c\": container with ID starting with 83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c not found: ID does not exist" containerID="83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.971983 4816 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c"} err="failed to get container status \"83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c\": rpc error: code = NotFound desc = could not find container \"83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c\": container with ID starting with 83c0d724223b575dead10d41eb269f29b7eb83d2daa40e9838448b2b525ea24c not found: ID does not exist" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.972046 4816 scope.go:117] "RemoveContainer" containerID="6177b192061a23f28c87c9be802fd4cc201dbedbba40a631fd4503266ce7bc45" Feb 16 14:35:34 crc kubenswrapper[4816]: I0216 14:35:34.989293 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.003308 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.014862 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.015048 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c1b4217-ef70-4aa2-880b-b92e1d536ad8" (UID: "7c1b4217-ef70-4aa2-880b-b92e1d536ad8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.043701 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.043769 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.043875 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch6hr\" (UniqueName: \"kubernetes.io/projected/329ccf42-5f39-4f99-a3c7-4ddc76208882-kube-api-access-ch6hr\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.043963 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.043986 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.043999 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxs8t\" (UniqueName: \"kubernetes.io/projected/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-kube-api-access-zxs8t\") on node 
\"crc\" DevicePath \"\"" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.055924 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.070885 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-config-data" (OuterVolumeSpecName: "config-data") pod "7c1b4217-ef70-4aa2-880b-b92e1d536ad8" (UID: "7c1b4217-ef70-4aa2-880b-b92e1d536ad8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.093747 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.117444 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.129717 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.130997 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.136574 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.140244 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.141794 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145470 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145518 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145567 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145621 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-config-data\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145667 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-config-data\") pod 
\"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145691 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145712 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-logs\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145732 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56dmz\" (UniqueName: \"kubernetes.io/projected/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-kube-api-access-56dmz\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145748 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch6hr\" (UniqueName: \"kubernetes.io/projected/329ccf42-5f39-4f99-a3c7-4ddc76208882-kube-api-access-ch6hr\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145789 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9qs5\" (UniqueName: \"kubernetes.io/projected/04537661-4733-45d6-a694-48c9cde1cbb5-kube-api-access-h9qs5\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.145849 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c1b4217-ef70-4aa2-880b-b92e1d536ad8-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.146567 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.152211 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.154848 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.155363 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.171304 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch6hr\" 
(UniqueName: \"kubernetes.io/projected/329ccf42-5f39-4f99-a3c7-4ddc76208882-kube-api-access-ch6hr\") pod \"nova-cell0-conductor-0\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.179360 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.247527 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-logs\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.247606 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56dmz\" (UniqueName: \"kubernetes.io/projected/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-kube-api-access-56dmz\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.247707 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9qs5\" (UniqueName: \"kubernetes.io/projected/04537661-4733-45d6-a694-48c9cde1cbb5-kube-api-access-h9qs5\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.247775 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.247866 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-config-data\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.247927 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.247963 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.249411 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-logs\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.252504 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-config-data\") pod 
\"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.253162 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.254409 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.255745 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.266572 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56dmz\" (UniqueName: \"kubernetes.io/projected/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-kube-api-access-56dmz\") pod \"nova-metadata-0\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.271723 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9qs5\" (UniqueName: \"kubernetes.io/projected/04537661-4733-45d6-a694-48c9cde1cbb5-kube-api-access-h9qs5\") pod \"nova-cell1-conductor-0\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.369626 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.400868 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:35:35 crc kubenswrapper[4816]: E0216 14:35:35.401113 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.415268 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e32b2ea-15ec-43fd-bfe6-259c75809b4a" path="/var/lib/kubelet/pods/5e32b2ea-15ec-43fd-bfe6-259c75809b4a/volumes" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.416016 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7366b149-cbc0-40c5-a9aa-753f21d7c971" path="/var/lib/kubelet/pods/7366b149-cbc0-40c5-a9aa-753f21d7c971/volumes" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.416588 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b2ba465-5c2c-460b-8656-edc33f2015b1" path="/var/lib/kubelet/pods/7b2ba465-5c2c-460b-8656-edc33f2015b1/volumes" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.458146 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.467754 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.668530 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.864982 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.890036 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: W0216 14:35:35.903428 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod329ccf42_5f39_4f99_a3c7_4ddc76208882.slice/crio-6985ade96fb4dd2479c965efd5da1e35cf1805f87d236c1ae26cb770221c2d16 WatchSource:0}: Error finding container 6985ade96fb4dd2479c965efd5da1e35cf1805f87d236c1ae26cb770221c2d16: Status 404 returned error can't find the container with id 6985ade96fb4dd2479c965efd5da1e35cf1805f87d236c1ae26cb770221c2d16 Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.912518 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.929106 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.937426 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.956353 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 16 14:35:35 crc kubenswrapper[4816]: I0216 14:35:35.967703 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.049952 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e814f4db-24c6-4fb2-b389-3ab964e8fe40-logs\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.050129 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.050212 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md4v6\" (UniqueName: \"kubernetes.io/projected/e814f4db-24c6-4fb2-b389-3ab964e8fe40-kube-api-access-md4v6\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.050245 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-config-data\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.154545 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e814f4db-24c6-4fb2-b389-3ab964e8fe40-logs\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.154738 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.154910 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md4v6\" (UniqueName: \"kubernetes.io/projected/e814f4db-24c6-4fb2-b389-3ab964e8fe40-kube-api-access-md4v6\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.154963 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-config-data\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.155338 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e814f4db-24c6-4fb2-b389-3ab964e8fe40-logs\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " 
pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.160058 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.160719 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-config-data\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.177001 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md4v6\" (UniqueName: \"kubernetes.io/projected/e814f4db-24c6-4fb2-b389-3ab964e8fe40-kube-api-access-md4v6\") pod \"nova-api-0\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: W0216 14:35:36.185856 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04537661_4733_45d6_a694_48c9cde1cbb5.slice/crio-d33fcc493407e3e03478d0b824a47f42f06b2bed554a2063feb0d5f5e928314c WatchSource:0}: Error finding container d33fcc493407e3e03478d0b824a47f42f06b2bed554a2063feb0d5f5e928314c: Status 404 returned error can't find the container with id d33fcc493407e3e03478d0b824a47f42f06b2bed554a2063feb0d5f5e928314c Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.192097 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.205985 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 14:35:36 crc kubenswrapper[4816]: W0216 14:35:36.211844 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce6e206c_659f_4813_ab40_1cdc6ab9e22d.slice/crio-e06d9657c90ade4a43602a3cdcfc4584993f7ff494fd76217b78c627fe33e4ed WatchSource:0}: Error finding container e06d9657c90ade4a43602a3cdcfc4584993f7ff494fd76217b78c627fe33e4ed: Status 404 returned error can't find the container with id e06d9657c90ade4a43602a3cdcfc4584993f7ff494fd76217b78c627fe33e4ed Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.465427 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.695433 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce6e206c-659f-4813-ab40-1cdc6ab9e22d","Type":"ContainerStarted","Data":"d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca"} Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.695759 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce6e206c-659f-4813-ab40-1cdc6ab9e22d","Type":"ContainerStarted","Data":"2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452"} Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.695772 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce6e206c-659f-4813-ab40-1cdc6ab9e22d","Type":"ContainerStarted","Data":"e06d9657c90ade4a43602a3cdcfc4584993f7ff494fd76217b78c627fe33e4ed"} Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.701550 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"329ccf42-5f39-4f99-a3c7-4ddc76208882","Type":"ContainerStarted","Data":"8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a"} Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.701581 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"329ccf42-5f39-4f99-a3c7-4ddc76208882","Type":"ContainerStarted","Data":"6985ade96fb4dd2479c965efd5da1e35cf1805f87d236c1ae26cb770221c2d16"} Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.701959 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.705461 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"04537661-4733-45d6-a694-48c9cde1cbb5","Type":"ContainerStarted","Data":"c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe"} Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.705515 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"04537661-4733-45d6-a694-48c9cde1cbb5","Type":"ContainerStarted","Data":"d33fcc493407e3e03478d0b824a47f42f06b2bed554a2063feb0d5f5e928314c"} Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.705697 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.729430 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.729219342 podStartE2EDuration="2.729219342s" podCreationTimestamp="2026-02-16 14:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:36.723749043 +0000 UTC m=+5536.050462781" watchObservedRunningTime="2026-02-16 14:35:36.729219342 +0000 UTC m=+5536.055933070" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.757589 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.757570005 podStartE2EDuration="2.757570005s" podCreationTimestamp="2026-02-16 14:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:36.746689708 +0000 UTC 
m=+5536.073403466" watchObservedRunningTime="2026-02-16 14:35:36.757570005 +0000 UTC m=+5536.084283733" Feb 16 14:35:36 crc kubenswrapper[4816]: I0216 14:35:36.770825 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.770803656 podStartE2EDuration="2.770803656s" podCreationTimestamp="2026-02-16 14:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:36.766065677 +0000 UTC m=+5536.092779425" watchObservedRunningTime="2026-02-16 14:35:36.770803656 +0000 UTC m=+5536.097517384" Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.028086 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.410999 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c1b4217-ef70-4aa2-880b-b92e1d536ad8" path="/var/lib/kubelet/pods/7c1b4217-ef70-4aa2-880b-b92e1d536ad8/volumes" Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.715477 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e814f4db-24c6-4fb2-b389-3ab964e8fe40","Type":"ContainerStarted","Data":"bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453"} Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.715509 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e814f4db-24c6-4fb2-b389-3ab964e8fe40","Type":"ContainerStarted","Data":"b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848"} Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.715520 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e814f4db-24c6-4fb2-b389-3ab964e8fe40","Type":"ContainerStarted","Data":"a1f6412ec3e573f7e2ca99eb7e8bba01d918656d0b468d2861e2b310a6410a81"} Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.736860 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.736817915 podStartE2EDuration="2.736817915s" podCreationTimestamp="2026-02-16 14:35:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:37.733706621 +0000 UTC m=+5537.060420379" watchObservedRunningTime="2026-02-16 14:35:37.736817915 +0000 UTC m=+5537.063531643" Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.949073 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 16 14:35:37 crc kubenswrapper[4816]: I0216 14:35:37.969038 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:40 crc kubenswrapper[4816]: I0216 14:35:40.468413 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 14:35:40 crc kubenswrapper[4816]: I0216 14:35:40.469624 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 14:35:42 crc kubenswrapper[4816]: I0216 14:35:42.948603 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 16 14:35:42 crc kubenswrapper[4816]: I0216 14:35:42.969002 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 16 14:35:42 crc 
Feb 16 14:35:42 crc kubenswrapper[4816]: I0216 14:35:42.981396 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Feb 16 14:35:42 crc kubenswrapper[4816]: I0216 14:35:42.981458 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Feb 16 14:35:43 crc kubenswrapper[4816]: I0216 14:35:43.772710 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Feb 16 14:35:43 crc kubenswrapper[4816]: I0216 14:35:43.802444 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Feb 16 14:35:45 crc kubenswrapper[4816]: I0216 14:35:45.409609 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Feb 16 14:35:45 crc kubenswrapper[4816]: I0216 14:35:45.469566 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Feb 16 14:35:45 crc kubenswrapper[4816]: I0216 14:35:45.471334 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Feb 16 14:35:45 crc kubenswrapper[4816]: I0216 14:35:45.485482 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Feb 16 14:35:46 crc kubenswrapper[4816]: I0216 14:35:46.466720 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 16 14:35:46 crc kubenswrapper[4816]: I0216 14:35:46.466774 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Feb 16 14:35:46 crc kubenswrapper[4816]: I0216 14:35:46.550879 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.82:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 14:35:46 crc kubenswrapper[4816]: I0216 14:35:46.550879 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.82:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 14:35:47 crc kubenswrapper[4816]: I0216 14:35:47.399733 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9"
Feb 16 14:35:47 crc kubenswrapper[4816]: E0216 14:35:47.399992 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:35:47 crc kubenswrapper[4816]: I0216 14:35:47.549842 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.83:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
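The "Probe failed" output above is the error text of Go's net/http client when its overall request timeout fires before the server returns response headers; the startup probes against 10.217.1.82:8775 and 10.217.1.83:8774 simply did not answer in time. A self-contained sketch that reproduces the same failure mode; the slow test server and the one-second timeout are invented for illustration:

    package main

    import (
    	"fmt"
    	"net/http"
    	"net/http/httptest"
    	"time"
    )

    func main() {
    	// A handler that answers slower than the client is willing to wait,
    	// standing in for a service that is still starting up.
    	slow := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    		time.Sleep(2 * time.Second)
    	}))
    	defer slow.Close()

    	client := &http.Client{Timeout: 1 * time.Second} // probe timeout
    	_, err := client.Get(slow.URL)
    	fmt.Println(err)
    	// Prints: Get "http://127.0.0.1:...": context deadline exceeded
    	// (Client.Timeout exceeded while awaiting headers) — the same text
    	// recorded in the prober.go lines above.
    }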
Feb 16 14:35:47 crc kubenswrapper[4816]: I0216 14:35:47.550011 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.83:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.668393 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.670310 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.672485 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.682212 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.856611 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.856732 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdvjd\" (UniqueName: \"kubernetes.io/projected/9f0db743-ea5c-4fba-8b5b-708dbe170061-kube-api-access-hdvjd\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.856760 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.856785 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-scripts\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.856804 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.856844 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0db743-ea5c-4fba-8b5b-708dbe170061-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0"
Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.958584 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdvjd\" (UniqueName: \"kubernetes.io/projected/9f0db743-ea5c-4fba-8b5b-708dbe170061-kube-api-access-hdvjd\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0"
\"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.958626 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.958672 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-scripts\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.958693 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.958730 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0db743-ea5c-4fba-8b5b-708dbe170061-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.958777 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.959226 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0db743-ea5c-4fba-8b5b-708dbe170061-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.966042 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-scripts\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.966512 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.966567 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.967545 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.977814 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdvjd\" (UniqueName: \"kubernetes.io/projected/9f0db743-ea5c-4fba-8b5b-708dbe170061-kube-api-access-hdvjd\") pod \"cinder-scheduler-0\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " pod="openstack/cinder-scheduler-0" Feb 16 14:35:49 crc kubenswrapper[4816]: I0216 14:35:49.989989 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 14:35:50 crc kubenswrapper[4816]: W0216 14:35:50.487860 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f0db743_ea5c_4fba_8b5b_708dbe170061.slice/crio-448571380b600a02078e9598e310784f9f34ef5471c7efad7713f863bc1db85f WatchSource:0}: Error finding container 448571380b600a02078e9598e310784f9f34ef5471c7efad7713f863bc1db85f: Status 404 returned error can't find the container with id 448571380b600a02078e9598e310784f9f34ef5471c7efad7713f863bc1db85f Feb 16 14:35:50 crc kubenswrapper[4816]: I0216 14:35:50.489276 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 14:35:50 crc kubenswrapper[4816]: I0216 14:35:50.833765 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9f0db743-ea5c-4fba-8b5b-708dbe170061","Type":"ContainerStarted","Data":"448571380b600a02078e9598e310784f9f34ef5471c7efad7713f863bc1db85f"} Feb 16 14:35:50 crc kubenswrapper[4816]: I0216 14:35:50.837944 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:50 crc kubenswrapper[4816]: I0216 14:35:50.840361 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api-log" containerID="cri-o://e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64" gracePeriod=30 Feb 16 14:35:50 crc kubenswrapper[4816]: I0216 14:35:50.840452 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api" containerID="cri-o://e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d" gracePeriod=30 Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.338292 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.340097 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.344884 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.384835 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395404 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-sys\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395461 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395491 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395516 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395537 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395574 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-run\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395594 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395611 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395635 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395693 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v252l\" (UniqueName: \"kubernetes.io/projected/7c0c2726-3681-43b2-8697-230ab9f116c3-kube-api-access-v252l\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395730 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395760 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-dev\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395780 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395816 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7c0c2726-3681-43b2-8697-230ab9f116c3-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395845 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.395889 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.496948 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-sys\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.496995 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497020 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497044 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497062 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497069 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-sys\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497109 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-run\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497156 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-run\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497167 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497207 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497265 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
\"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497320 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v252l\" (UniqueName: \"kubernetes.io/projected/7c0c2726-3681-43b2-8697-230ab9f116c3-kube-api-access-v252l\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497363 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497396 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-dev\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497422 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497421 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497453 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7c0c2726-3681-43b2-8697-230ab9f116c3-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497532 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497629 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.498005 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.497364 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.498201 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.498241 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.498277 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-dev\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.498761 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.498815 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/7c0c2726-3681-43b2-8697-230ab9f116c3-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.505683 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.505780 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7c0c2726-3681-43b2-8697-230ab9f116c3-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.506421 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.508304 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.514189 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c0c2726-3681-43b2-8697-230ab9f116c3-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.515378 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v252l\" (UniqueName: \"kubernetes.io/projected/7c0c2726-3681-43b2-8697-230ab9f116c3-kube-api-access-v252l\") pod \"cinder-volume-volume1-0\" (UID: \"7c0c2726-3681-43b2-8697-230ab9f116c3\") " pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.670845 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.856178 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9f0db743-ea5c-4fba-8b5b-708dbe170061","Type":"ContainerStarted","Data":"6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2"}
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.856524 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9f0db743-ea5c-4fba-8b5b-708dbe170061","Type":"ContainerStarted","Data":"d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b"}
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.860483 4816 generic.go:334] "Generic (PLEG): container finished" podID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerID="e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64" exitCode=143
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.860536 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"061f267d-ec5e-4687-8d47-0f60a3a18f07","Type":"ContainerDied","Data":"e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64"}
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.887136 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.887087385 podStartE2EDuration="2.887087385s" podCreationTimestamp="2026-02-16 14:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:51.879547319 +0000 UTC m=+5551.206261047" watchObservedRunningTime="2026-02-16 14:35:51.887087385 +0000 UTC m=+5551.213801113"
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.979917 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"]
Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.981890 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0"
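The exitCode=143 above follows the conventional "128 + signal number" encoding for signal deaths: the cinder-api-log container exited on the SIGTERM (signal 15) delivered during the graceful stop logged a second earlier. A one-line Go check of the arithmetic (Unix-only because of the syscall package):

    package main

    import (
    	"fmt"
    	"syscall"
    )

    func main() {
    	// Shells and container runtimes report signal deaths as 128+N.
    	fmt.Println(128 + int(syscall.SIGTERM)) // prints 143, matching exitCode=143
    }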
Need to start a new one" pod="openstack/cinder-backup-0" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.989018 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Feb 16 14:35:51 crc kubenswrapper[4816]: I0216 14:35:51.999329 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.111199 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e0a0b2de-affb-4954-bb88-fbc263a54b06-ceph\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.111542 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.111706 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.111952 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-scripts\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112073 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-sys\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112213 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-config-data\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112257 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-nvme\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112301 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-run\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112326 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112348 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112383 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112458 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112487 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-dev\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112504 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-lib-modules\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112589 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j7dm\" (UniqueName: \"kubernetes.io/projected/e0a0b2de-affb-4954-bb88-fbc263a54b06-kube-api-access-6j7dm\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.112638 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-config-data-custom\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214257 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e0a0b2de-affb-4954-bb88-fbc263a54b06-ceph\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214311 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" 
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214353 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214449 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214383 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-scripts\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214504 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-sys\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214528 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.214744 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-sys\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215371 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-config-data\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215404 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-nvme\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-run\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215457 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215470 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215489 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215519 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215535 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-dev\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215548 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-lib-modules\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215603 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j7dm\" (UniqueName: \"kubernetes.io/projected/e0a0b2de-affb-4954-bb88-fbc263a54b06-kube-api-access-6j7dm\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215640 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-config-data-custom\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215936 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.215986 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-nvme\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.216009 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-run\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0"
I0216 14:35:52.216042 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.218790 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-lib-modules\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.218823 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.218783 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e0a0b2de-affb-4954-bb88-fbc263a54b06-dev\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.219682 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e0a0b2de-affb-4954-bb88-fbc263a54b06-ceph\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.221645 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-config-data\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.221989 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-scripts\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.222430 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.228291 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0a0b2de-affb-4954-bb88-fbc263a54b06-config-data-custom\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.247148 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j7dm\" (UniqueName: \"kubernetes.io/projected/e0a0b2de-affb-4954-bb88-fbc263a54b06-kube-api-access-6j7dm\") pod \"cinder-backup-0\" (UID: \"e0a0b2de-affb-4954-bb88-fbc263a54b06\") " pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.265715 4816 provider.go:102] Refreshing cache 
for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.271436 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.344127 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Feb 16 14:35:52 crc kubenswrapper[4816]: I0216 14:35:52.870586 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"7c0c2726-3681-43b2-8697-230ab9f116c3","Type":"ContainerStarted","Data":"a33999ba877b3b08c8a4ff8a8d92c313ee94174461f1fe42c50623786549dc4f"} Feb 16 14:35:53 crc kubenswrapper[4816]: I0216 14:35:53.002047 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Feb 16 14:35:53 crc kubenswrapper[4816]: W0216 14:35:53.002648 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0a0b2de_affb_4954_bb88_fbc263a54b06.slice/crio-cab0c47ed150d7822ae7bfc515a06b478203b4778bee1aa0ff1862ed8aa8d5c1 WatchSource:0}: Error finding container cab0c47ed150d7822ae7bfc515a06b478203b4778bee1aa0ff1862ed8aa8d5c1: Status 404 returned error can't find the container with id cab0c47ed150d7822ae7bfc515a06b478203b4778bee1aa0ff1862ed8aa8d5c1 Feb 16 14:35:53 crc kubenswrapper[4816]: I0216 14:35:53.886950 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"7c0c2726-3681-43b2-8697-230ab9f116c3","Type":"ContainerStarted","Data":"eeb7549303802f5e7ca41c002c68b25d25afd1af34765346ba1c9ef23aa57a23"} Feb 16 14:35:53 crc kubenswrapper[4816]: I0216 14:35:53.887397 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"7c0c2726-3681-43b2-8697-230ab9f116c3","Type":"ContainerStarted","Data":"f429addd99ba8619c1fd9339018b5440600c105cc89fc6886e160d27bb0e3331"} Feb 16 14:35:53 crc kubenswrapper[4816]: I0216 14:35:53.889995 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"e0a0b2de-affb-4954-bb88-fbc263a54b06","Type":"ContainerStarted","Data":"1cfe85a6b6b686601e0b04ec96ead497a43ea4005617172e6fba03aac4461e87"} Feb 16 14:35:53 crc kubenswrapper[4816]: I0216 14:35:53.890069 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"e0a0b2de-affb-4954-bb88-fbc263a54b06","Type":"ContainerStarted","Data":"cab0c47ed150d7822ae7bfc515a06b478203b4778bee1aa0ff1862ed8aa8d5c1"} Feb 16 14:35:53 crc kubenswrapper[4816]: I0216 14:35:53.923713 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=1.939605743 podStartE2EDuration="2.923690675s" podCreationTimestamp="2026-02-16 14:35:51 +0000 UTC" firstStartedPulling="2026-02-16 14:35:52.265196328 +0000 UTC m=+5551.591910056" lastFinishedPulling="2026-02-16 14:35:53.24928126 +0000 UTC m=+5552.575994988" observedRunningTime="2026-02-16 14:35:53.919049758 +0000 UTC m=+5553.245763486" watchObservedRunningTime="2026-02-16 14:35:53.923690675 +0000 UTC m=+5553.250404403" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.113341 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.77:8776/healthcheck\": dial tcp 
10.217.1.77:8776: connect: connection refused" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.442344 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569137 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data-custom\") pod \"061f267d-ec5e-4687-8d47-0f60a3a18f07\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569201 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061f267d-ec5e-4687-8d47-0f60a3a18f07-logs\") pod \"061f267d-ec5e-4687-8d47-0f60a3a18f07\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569282 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-combined-ca-bundle\") pod \"061f267d-ec5e-4687-8d47-0f60a3a18f07\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569335 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-scripts\") pod \"061f267d-ec5e-4687-8d47-0f60a3a18f07\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569388 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/061f267d-ec5e-4687-8d47-0f60a3a18f07-etc-machine-id\") pod \"061f267d-ec5e-4687-8d47-0f60a3a18f07\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569426 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data\") pod \"061f267d-ec5e-4687-8d47-0f60a3a18f07\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569496 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvfml\" (UniqueName: \"kubernetes.io/projected/061f267d-ec5e-4687-8d47-0f60a3a18f07-kube-api-access-wvfml\") pod \"061f267d-ec5e-4687-8d47-0f60a3a18f07\" (UID: \"061f267d-ec5e-4687-8d47-0f60a3a18f07\") " Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.569964 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/061f267d-ec5e-4687-8d47-0f60a3a18f07-logs" (OuterVolumeSpecName: "logs") pod "061f267d-ec5e-4687-8d47-0f60a3a18f07" (UID: "061f267d-ec5e-4687-8d47-0f60a3a18f07"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.570017 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/061f267d-ec5e-4687-8d47-0f60a3a18f07-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "061f267d-ec5e-4687-8d47-0f60a3a18f07" (UID: "061f267d-ec5e-4687-8d47-0f60a3a18f07"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.570098 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/061f267d-ec5e-4687-8d47-0f60a3a18f07-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.570116 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/061f267d-ec5e-4687-8d47-0f60a3a18f07-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.574756 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "061f267d-ec5e-4687-8d47-0f60a3a18f07" (UID: "061f267d-ec5e-4687-8d47-0f60a3a18f07"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.574878 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-scripts" (OuterVolumeSpecName: "scripts") pod "061f267d-ec5e-4687-8d47-0f60a3a18f07" (UID: "061f267d-ec5e-4687-8d47-0f60a3a18f07"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.576622 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/061f267d-ec5e-4687-8d47-0f60a3a18f07-kube-api-access-wvfml" (OuterVolumeSpecName: "kube-api-access-wvfml") pod "061f267d-ec5e-4687-8d47-0f60a3a18f07" (UID: "061f267d-ec5e-4687-8d47-0f60a3a18f07"). InnerVolumeSpecName "kube-api-access-wvfml". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.596755 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "061f267d-ec5e-4687-8d47-0f60a3a18f07" (UID: "061f267d-ec5e-4687-8d47-0f60a3a18f07"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.618633 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data" (OuterVolumeSpecName: "config-data") pod "061f267d-ec5e-4687-8d47-0f60a3a18f07" (UID: "061f267d-ec5e-4687-8d47-0f60a3a18f07"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.672330 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.672372 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.672385 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.672397 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061f267d-ec5e-4687-8d47-0f60a3a18f07-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.672410 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvfml\" (UniqueName: \"kubernetes.io/projected/061f267d-ec5e-4687-8d47-0f60a3a18f07-kube-api-access-wvfml\") on node \"crc\" DevicePath \"\"" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.901009 4816 generic.go:334] "Generic (PLEG): container finished" podID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerID="e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d" exitCode=0 Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.901083 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"061f267d-ec5e-4687-8d47-0f60a3a18f07","Type":"ContainerDied","Data":"e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d"} Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.901117 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"061f267d-ec5e-4687-8d47-0f60a3a18f07","Type":"ContainerDied","Data":"4955632fc9905cf3c687525a36aa57a57776ee74b7bb920c74802544b1612683"} Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.901139 4816 scope.go:117] "RemoveContainer" containerID="e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.901299 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.909067 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"e0a0b2de-affb-4954-bb88-fbc263a54b06","Type":"ContainerStarted","Data":"b836d05769e41b12f79602f0887269e782ee449ad22342885f9f934551833a66"} Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.934605 4816 scope.go:117] "RemoveContainer" containerID="e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.950560 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.277936457 podStartE2EDuration="3.950539973s" podCreationTimestamp="2026-02-16 14:35:51 +0000 UTC" firstStartedPulling="2026-02-16 14:35:53.005185432 +0000 UTC m=+5552.331899160" lastFinishedPulling="2026-02-16 14:35:53.677788948 +0000 UTC m=+5553.004502676" observedRunningTime="2026-02-16 14:35:54.934837415 +0000 UTC m=+5554.261551143" watchObservedRunningTime="2026-02-16 14:35:54.950539973 +0000 UTC m=+5554.277253701" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.958197 4816 scope.go:117] "RemoveContainer" containerID="e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d" Feb 16 14:35:54 crc kubenswrapper[4816]: E0216 14:35:54.958845 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d\": container with ID starting with e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d not found: ID does not exist" containerID="e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.958907 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d"} err="failed to get container status \"e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d\": rpc error: code = NotFound desc = could not find container \"e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d\": container with ID starting with e8f38680bdb8cc699fbede06745f5a3c218012ef660548fe4b07c77d2ebf105d not found: ID does not exist" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.958934 4816 scope.go:117] "RemoveContainer" containerID="e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64" Feb 16 14:35:54 crc kubenswrapper[4816]: E0216 14:35:54.959328 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64\": container with ID starting with e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64 not found: ID does not exist" containerID="e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.959378 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64"} err="failed to get container status \"e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64\": rpc error: code = NotFound desc = could not find container \"e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64\": container with ID starting with 
e8d86d2a055fc3dd200cd07d8713aa8f48557c8c37793664daf1765605e47d64 not found: ID does not exist" Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.990080 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:54 crc kubenswrapper[4816]: I0216 14:35:54.990154 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.009060 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.017069 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:55 crc kubenswrapper[4816]: E0216 14:35:55.017614 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.017677 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api" Feb 16 14:35:55 crc kubenswrapper[4816]: E0216 14:35:55.017699 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api-log" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.017708 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api-log" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.017948 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api-log" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.017981 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" containerName="cinder-api" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.019319 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.021282 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.027148 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.088750 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-config-data\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.088801 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0135ee69-6313-403e-97f7-7675511cc726-logs\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.088844 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.088893 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0135ee69-6313-403e-97f7-7675511cc726-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.089257 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-scripts\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.089471 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dbvv\" (UniqueName: \"kubernetes.io/projected/0135ee69-6313-403e-97f7-7675511cc726-kube-api-access-6dbvv\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.089503 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-config-data-custom\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.191147 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0135ee69-6313-403e-97f7-7675511cc726-logs\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.191706 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.191782 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0135ee69-6313-403e-97f7-7675511cc726-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.191934 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-scripts\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.191961 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0135ee69-6313-403e-97f7-7675511cc726-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.191967 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dbvv\" (UniqueName: \"kubernetes.io/projected/0135ee69-6313-403e-97f7-7675511cc726-kube-api-access-6dbvv\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.192046 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-config-data-custom\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.192242 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-config-data\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.192920 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0135ee69-6313-403e-97f7-7675511cc726-logs\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.197943 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.198269 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-scripts\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.199763 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-config-data\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.200457 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0135ee69-6313-403e-97f7-7675511cc726-config-data-custom\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.213748 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dbvv\" (UniqueName: \"kubernetes.io/projected/0135ee69-6313-403e-97f7-7675511cc726-kube-api-access-6dbvv\") pod \"cinder-api-0\" (UID: \"0135ee69-6313-403e-97f7-7675511cc726\") " pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.384634 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.419083 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="061f267d-ec5e-4687-8d47-0f60a3a18f07" path="/var/lib/kubelet/pods/061f267d-ec5e-4687-8d47-0f60a3a18f07/volumes" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.473072 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.483585 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.483786 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.843133 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.920689 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0135ee69-6313-403e-97f7-7675511cc726","Type":"ContainerStarted","Data":"116d52eb1c139a9a4a5cd1ce514c3967dd938304ceda26866f265801032cbb38"} Feb 16 14:35:55 crc kubenswrapper[4816]: I0216 14:35:55.925097 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.470698 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.471073 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.471444 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.471488 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.474784 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.475997 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.671424 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/cinder-volume-volume1-0" Feb 16 14:35:56 crc kubenswrapper[4816]: I0216 14:35:56.943885 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0135ee69-6313-403e-97f7-7675511cc726","Type":"ContainerStarted","Data":"43229bdd75814a63ccbfe9cceaad01bdfdfaaaf8586cb1873eac42e0bbb9f9e6"} Feb 16 14:35:57 crc kubenswrapper[4816]: I0216 14:35:57.356611 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Feb 16 14:35:57 crc kubenswrapper[4816]: I0216 14:35:57.955190 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0135ee69-6313-403e-97f7-7675511cc726","Type":"ContainerStarted","Data":"7cdd4db96524dd4000c030e321db34c9f64529304f4f4ebb791726bd131ab41b"} Feb 16 14:35:57 crc kubenswrapper[4816]: I0216 14:35:57.955738 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 16 14:35:57 crc kubenswrapper[4816]: I0216 14:35:57.980412 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.980390955 podStartE2EDuration="3.980390955s" podCreationTimestamp="2026-02-16 14:35:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:35:57.970790722 +0000 UTC m=+5557.297504461" watchObservedRunningTime="2026-02-16 14:35:57.980390955 +0000 UTC m=+5557.307104683" Feb 16 14:36:00 crc kubenswrapper[4816]: I0216 14:36:00.242293 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 16 14:36:00 crc kubenswrapper[4816]: I0216 14:36:00.305379 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 14:36:00 crc kubenswrapper[4816]: I0216 14:36:00.980568 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="cinder-scheduler" containerID="cri-o://d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b" gracePeriod=30 Feb 16 14:36:00 crc kubenswrapper[4816]: I0216 14:36:00.980739 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="probe" containerID="cri-o://6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2" gracePeriod=30 Feb 16 14:36:01 crc kubenswrapper[4816]: I0216 14:36:01.896473 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Feb 16 14:36:01 crc kubenswrapper[4816]: I0216 14:36:01.990674 4816 generic.go:334] "Generic (PLEG): container finished" podID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerID="6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2" exitCode=0 Feb 16 14:36:01 crc kubenswrapper[4816]: I0216 14:36:01.990719 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9f0db743-ea5c-4fba-8b5b-708dbe170061","Type":"ContainerDied","Data":"6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2"} Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.402607 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:36:02 crc kubenswrapper[4816]: E0216 14:36:02.403052 4816 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.590788 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.632505 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.665344 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data-custom\") pod \"9f0db743-ea5c-4fba-8b5b-708dbe170061\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.665416 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data\") pod \"9f0db743-ea5c-4fba-8b5b-708dbe170061\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.665477 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-combined-ca-bundle\") pod \"9f0db743-ea5c-4fba-8b5b-708dbe170061\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.665646 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0db743-ea5c-4fba-8b5b-708dbe170061-etc-machine-id\") pod \"9f0db743-ea5c-4fba-8b5b-708dbe170061\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.665725 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-scripts\") pod \"9f0db743-ea5c-4fba-8b5b-708dbe170061\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.665835 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdvjd\" (UniqueName: \"kubernetes.io/projected/9f0db743-ea5c-4fba-8b5b-708dbe170061-kube-api-access-hdvjd\") pod \"9f0db743-ea5c-4fba-8b5b-708dbe170061\" (UID: \"9f0db743-ea5c-4fba-8b5b-708dbe170061\") " Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.665996 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f0db743-ea5c-4fba-8b5b-708dbe170061-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9f0db743-ea5c-4fba-8b5b-708dbe170061" (UID: "9f0db743-ea5c-4fba-8b5b-708dbe170061"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.666987 4816 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f0db743-ea5c-4fba-8b5b-708dbe170061-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.670897 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-scripts" (OuterVolumeSpecName: "scripts") pod "9f0db743-ea5c-4fba-8b5b-708dbe170061" (UID: "9f0db743-ea5c-4fba-8b5b-708dbe170061"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.683338 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9f0db743-ea5c-4fba-8b5b-708dbe170061" (UID: "9f0db743-ea5c-4fba-8b5b-708dbe170061"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.686799 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f0db743-ea5c-4fba-8b5b-708dbe170061-kube-api-access-hdvjd" (OuterVolumeSpecName: "kube-api-access-hdvjd") pod "9f0db743-ea5c-4fba-8b5b-708dbe170061" (UID: "9f0db743-ea5c-4fba-8b5b-708dbe170061"). InnerVolumeSpecName "kube-api-access-hdvjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.737352 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f0db743-ea5c-4fba-8b5b-708dbe170061" (UID: "9f0db743-ea5c-4fba-8b5b-708dbe170061"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.768774 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdvjd\" (UniqueName: \"kubernetes.io/projected/9f0db743-ea5c-4fba-8b5b-708dbe170061-kube-api-access-hdvjd\") on node \"crc\" DevicePath \"\"" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.768815 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.768825 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.768833 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.782178 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data" (OuterVolumeSpecName: "config-data") pod "9f0db743-ea5c-4fba-8b5b-708dbe170061" (UID: "9f0db743-ea5c-4fba-8b5b-708dbe170061"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:36:02 crc kubenswrapper[4816]: I0216 14:36:02.870743 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f0db743-ea5c-4fba-8b5b-708dbe170061-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.003143 4816 generic.go:334] "Generic (PLEG): container finished" podID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerID="d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b" exitCode=0 Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.003193 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9f0db743-ea5c-4fba-8b5b-708dbe170061","Type":"ContainerDied","Data":"d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b"} Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.003226 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9f0db743-ea5c-4fba-8b5b-708dbe170061","Type":"ContainerDied","Data":"448571380b600a02078e9598e310784f9f34ef5471c7efad7713f863bc1db85f"} Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.003251 4816 scope.go:117] "RemoveContainer" containerID="6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.003258 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.035060 4816 scope.go:117] "RemoveContainer" containerID="d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.044017 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.063110 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.071769 4816 scope.go:117] "RemoveContainer" containerID="6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2" Feb 16 14:36:03 crc kubenswrapper[4816]: E0216 14:36:03.072316 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2\": container with ID starting with 6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2 not found: ID does not exist" containerID="6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.072360 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2"} err="failed to get container status \"6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2\": rpc error: code = NotFound desc = could not find container \"6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2\": container with ID starting with 6034db05b34c2bb8ee6b477d2b757d22f71a7f95431785eb4c6e3714f71dc1f2 not found: ID does not exist" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.072387 4816 scope.go:117] "RemoveContainer" containerID="d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.072440 4816 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-scheduler-0"] Feb 16 14:36:03 crc kubenswrapper[4816]: E0216 14:36:03.072825 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b\": container with ID starting with d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b not found: ID does not exist" containerID="d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b" Feb 16 14:36:03 crc kubenswrapper[4816]: E0216 14:36:03.072884 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="cinder-scheduler" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.072901 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="cinder-scheduler" Feb 16 14:36:03 crc kubenswrapper[4816]: E0216 14:36:03.072919 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="probe" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.072928 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="probe" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.072885 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b"} err="failed to get container status \"d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b\": rpc error: code = NotFound desc = could not find container \"d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b\": container with ID starting with d881d64582eeaf5991f53cb1f9d3079958ea14652465794e6e46b92eae51cb5b not found: ID does not exist" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.073150 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="probe" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.073177 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" containerName="cinder-scheduler" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.074302 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.076249 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.090992 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.175755 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd608722-b67c-4a82-a808-68519c126126-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.175816 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-config-data\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.175893 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7xzs\" (UniqueName: \"kubernetes.io/projected/fd608722-b67c-4a82-a808-68519c126126-kube-api-access-x7xzs\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.175935 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.175967 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-scripts\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.176024 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.277886 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-scripts\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.277986 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.278058 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd608722-b67c-4a82-a808-68519c126126-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.278086 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-config-data\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.278122 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7xzs\" (UniqueName: \"kubernetes.io/projected/fd608722-b67c-4a82-a808-68519c126126-kube-api-access-x7xzs\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.278169 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.278206 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd608722-b67c-4a82-a808-68519c126126-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.281518 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.283841 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-scripts\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.288329 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-config-data\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.290497 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd608722-b67c-4a82-a808-68519c126126-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.298309 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7xzs\" (UniqueName: \"kubernetes.io/projected/fd608722-b67c-4a82-a808-68519c126126-kube-api-access-x7xzs\") pod \"cinder-scheduler-0\" (UID: \"fd608722-b67c-4a82-a808-68519c126126\") " 
pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.392236 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.414881 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f0db743-ea5c-4fba-8b5b-708dbe170061" path="/var/lib/kubelet/pods/9f0db743-ea5c-4fba-8b5b-708dbe170061/volumes" Feb 16 14:36:03 crc kubenswrapper[4816]: I0216 14:36:03.879708 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 16 14:36:04 crc kubenswrapper[4816]: I0216 14:36:04.015548 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd608722-b67c-4a82-a808-68519c126126","Type":"ContainerStarted","Data":"50de6f824721bd54bfd36e946aeae3d305fd4d57c181c78944c6c849b38f7892"} Feb 16 14:36:05 crc kubenswrapper[4816]: I0216 14:36:05.025615 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd608722-b67c-4a82-a808-68519c126126","Type":"ContainerStarted","Data":"52821ea2b601d6c03ff1e072d416953f0229d11205416dc938cd2f11de84576b"} Feb 16 14:36:05 crc kubenswrapper[4816]: I0216 14:36:05.026080 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fd608722-b67c-4a82-a808-68519c126126","Type":"ContainerStarted","Data":"e2b462d155cef21d3b677cc1944776df73f6764366164ceb7fd14cb1e9a23d18"} Feb 16 14:36:05 crc kubenswrapper[4816]: I0216 14:36:05.063598 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.063574055 podStartE2EDuration="2.063574055s" podCreationTimestamp="2026-02-16 14:36:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:36:05.057808137 +0000 UTC m=+5564.384521865" watchObservedRunningTime="2026-02-16 14:36:05.063574055 +0000 UTC m=+5564.390287783" Feb 16 14:36:07 crc kubenswrapper[4816]: I0216 14:36:07.285544 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 16 14:36:08 crc kubenswrapper[4816]: I0216 14:36:08.393219 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 16 14:36:13 crc kubenswrapper[4816]: I0216 14:36:13.669332 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 16 14:36:16 crc kubenswrapper[4816]: I0216 14:36:16.399375 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:36:16 crc kubenswrapper[4816]: E0216 14:36:16.399967 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:36:27 crc kubenswrapper[4816]: I0216 14:36:27.398754 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:36:27 crc kubenswrapper[4816]: E0216 14:36:27.399486 4816 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:36:42 crc kubenswrapper[4816]: I0216 14:36:42.399684 4816 scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:36:43 crc kubenswrapper[4816]: I0216 14:36:43.377350 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"cd2cf9fe35bf79ffe24a1838af8ddd11554eb97681bc8bd3d107b775cf4e6572"} Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.078975 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"] Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.081901 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-llgb5" Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.115446 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"] Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.188717 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68wzl\" (UniqueName: \"kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5" Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.188774 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5" Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.188822 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-utilities\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5" Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.290235 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68wzl\" (UniqueName: \"kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5" Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.290295 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5" Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.290330 4816 
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.078975 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"]
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.081901 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.115446 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"]
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.188717 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68wzl\" (UniqueName: \"kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.188774 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.188822 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-utilities\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.290235 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68wzl\" (UniqueName: \"kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.290295 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.290330 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-utilities\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.290879 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-utilities\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.291902 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.315572 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68wzl\" (UniqueName: \"kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl\") pod \"redhat-marketplace-llgb5\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") " pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.412225 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:15 crc kubenswrapper[4816]: I0216 14:37:15.930896 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"]
Feb 16 14:37:16 crc kubenswrapper[4816]: I0216 14:37:16.715367 4816 generic.go:334] "Generic (PLEG): container finished" podID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerID="60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13" exitCode=0
Feb 16 14:37:16 crc kubenswrapper[4816]: I0216 14:37:16.715519 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-llgb5" event={"ID":"48f39552-d9af-404d-8d9f-24ae6c0f8bd3","Type":"ContainerDied","Data":"60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13"}
Feb 16 14:37:16 crc kubenswrapper[4816]: I0216 14:37:16.715747 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-llgb5" event={"ID":"48f39552-d9af-404d-8d9f-24ae6c0f8bd3","Type":"ContainerStarted","Data":"0640379e0236928ad1c4758511721ca7e4b75ad3e6b857454759239633395997"}
Feb 16 14:37:17 crc kubenswrapper[4816]: I0216 14:37:17.729962 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-llgb5" event={"ID":"48f39552-d9af-404d-8d9f-24ae6c0f8bd3","Type":"ContainerStarted","Data":"feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765"}
Feb 16 14:37:17 crc kubenswrapper[4816]: I0216 14:37:17.878668 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lqxfs"]
Feb 16 14:37:17 crc kubenswrapper[4816]: I0216 14:37:17.883800 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:17 crc kubenswrapper[4816]: I0216 14:37:17.890626 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lqxfs"]
Feb 16 14:37:17 crc kubenswrapper[4816]: I0216 14:37:17.936920 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-utilities\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:17 crc kubenswrapper[4816]: I0216 14:37:17.937210 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6d8r\" (UniqueName: \"kubernetes.io/projected/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-kube-api-access-m6d8r\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:17 crc kubenswrapper[4816]: I0216 14:37:17.937327 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-catalog-content\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.038955 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6d8r\" (UniqueName: \"kubernetes.io/projected/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-kube-api-access-m6d8r\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.039032 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-catalog-content\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.039147 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-utilities\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.039745 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-utilities\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.039786 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-catalog-content\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.062118 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6d8r\" (UniqueName: \"kubernetes.io/projected/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-kube-api-access-m6d8r\") pod \"redhat-operators-lqxfs\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") " pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.214484 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:18 crc kubenswrapper[4816]: W0216 14:37:18.722580 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podffbbf5dc_2e5c_4d98_9b14_f449e0085049.slice/crio-54a6f1add824f6b1658d5e1c18d871ffc790e43cccc3f170e20f3c26457a5606 WatchSource:0}: Error finding container 54a6f1add824f6b1658d5e1c18d871ffc790e43cccc3f170e20f3c26457a5606: Status 404 returned error can't find the container with id 54a6f1add824f6b1658d5e1c18d871ffc790e43cccc3f170e20f3c26457a5606
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.726390 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lqxfs"]
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.748367 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lqxfs" event={"ID":"ffbbf5dc-2e5c-4d98-9b14-f449e0085049","Type":"ContainerStarted","Data":"54a6f1add824f6b1658d5e1c18d871ffc790e43cccc3f170e20f3c26457a5606"}
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.752905 4816 generic.go:334] "Generic (PLEG): container finished" podID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerID="feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765" exitCode=0
Feb 16 14:37:18 crc kubenswrapper[4816]: I0216 14:37:18.752954 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-llgb5" event={"ID":"48f39552-d9af-404d-8d9f-24ae6c0f8bd3","Type":"ContainerDied","Data":"feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765"}
Feb 16 14:37:19 crc kubenswrapper[4816]: I0216 14:37:19.764819 4816 generic.go:334] "Generic (PLEG): container finished" podID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerID="5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15" exitCode=0
Feb 16 14:37:19 crc kubenswrapper[4816]: I0216 14:37:19.764915 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lqxfs" event={"ID":"ffbbf5dc-2e5c-4d98-9b14-f449e0085049","Type":"ContainerDied","Data":"5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15"}
Feb 16 14:37:19 crc kubenswrapper[4816]: I0216 14:37:19.768670 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-llgb5" event={"ID":"48f39552-d9af-404d-8d9f-24ae6c0f8bd3","Type":"ContainerStarted","Data":"f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0"}
Feb 16 14:37:19 crc kubenswrapper[4816]: I0216 14:37:19.807711 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-llgb5" podStartSLOduration=2.36590215 podStartE2EDuration="4.807683107s" podCreationTimestamp="2026-02-16 14:37:15 +0000 UTC" firstStartedPulling="2026-02-16 14:37:16.717418629 +0000 UTC m=+5636.044132357" lastFinishedPulling="2026-02-16 14:37:19.159199586 +0000 UTC m=+5638.485913314" observedRunningTime="2026-02-16 14:37:19.798976959 +0000 UTC m=+5639.125690687" watchObservedRunningTime="2026-02-16 14:37:19.807683107 +0000 UTC m=+5639.134396845"
Feb 16 14:37:21 crc kubenswrapper[4816]: I0216 14:37:21.789828 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lqxfs" event={"ID":"ffbbf5dc-2e5c-4d98-9b14-f449e0085049","Type":"ContainerStarted","Data":"e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394"}
Feb 16 14:37:23 crc kubenswrapper[4816]: I0216 14:37:23.807968 4816 generic.go:334] "Generic (PLEG): container finished" podID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerID="e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394" exitCode=0
Feb 16 14:37:23 crc kubenswrapper[4816]: I0216 14:37:23.808053 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lqxfs" event={"ID":"ffbbf5dc-2e5c-4d98-9b14-f449e0085049","Type":"ContainerDied","Data":"e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394"}
Feb 16 14:37:24 crc kubenswrapper[4816]: I0216 14:37:24.820144 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lqxfs" event={"ID":"ffbbf5dc-2e5c-4d98-9b14-f449e0085049","Type":"ContainerStarted","Data":"4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c"}
Feb 16 14:37:24 crc kubenswrapper[4816]: I0216 14:37:24.842793 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lqxfs" podStartSLOduration=3.435322672 podStartE2EDuration="7.842773714s" podCreationTimestamp="2026-02-16 14:37:17 +0000 UTC" firstStartedPulling="2026-02-16 14:37:19.766977968 +0000 UTC m=+5639.093691696" lastFinishedPulling="2026-02-16 14:37:24.17442901 +0000 UTC m=+5643.501142738" observedRunningTime="2026-02-16 14:37:24.838110017 +0000 UTC m=+5644.164823745" watchObservedRunningTime="2026-02-16 14:37:24.842773714 +0000 UTC m=+5644.169487442"
Feb 16 14:37:25 crc kubenswrapper[4816]: I0216 14:37:25.413085 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:25 crc kubenswrapper[4816]: I0216 14:37:25.413462 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:25 crc kubenswrapper[4816]: I0216 14:37:25.456536 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:25 crc kubenswrapper[4816]: I0216 14:37:25.873367 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:26 crc kubenswrapper[4816]: I0216 14:37:26.474357 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"]
Feb 16 14:37:27 crc kubenswrapper[4816]: I0216 14:37:27.845617 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-llgb5" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="registry-server" containerID="cri-o://f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0" gracePeriod=2
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.215153 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.215350 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.349053 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.472903 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content\") pod \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") "
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.474157 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-utilities\") pod \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") "
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.474337 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68wzl\" (UniqueName: \"kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl\") pod \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\" (UID: \"48f39552-d9af-404d-8d9f-24ae6c0f8bd3\") "
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.474978 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-utilities" (OuterVolumeSpecName: "utilities") pod "48f39552-d9af-404d-8d9f-24ae6c0f8bd3" (UID: "48f39552-d9af-404d-8d9f-24ae6c0f8bd3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.476377 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.486431 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl" (OuterVolumeSpecName: "kube-api-access-68wzl") pod "48f39552-d9af-404d-8d9f-24ae6c0f8bd3" (UID: "48f39552-d9af-404d-8d9f-24ae6c0f8bd3"). InnerVolumeSpecName "kube-api-access-68wzl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.500121 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48f39552-d9af-404d-8d9f-24ae6c0f8bd3" (UID: "48f39552-d9af-404d-8d9f-24ae6c0f8bd3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.578613 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.578693 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68wzl\" (UniqueName: \"kubernetes.io/projected/48f39552-d9af-404d-8d9f-24ae6c0f8bd3-kube-api-access-68wzl\") on node \"crc\" DevicePath \"\""
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.854517 4816 generic.go:334] "Generic (PLEG): container finished" podID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerID="f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0" exitCode=0
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.854563 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-llgb5" event={"ID":"48f39552-d9af-404d-8d9f-24ae6c0f8bd3","Type":"ContainerDied","Data":"f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0"}
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.854593 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-llgb5" event={"ID":"48f39552-d9af-404d-8d9f-24ae6c0f8bd3","Type":"ContainerDied","Data":"0640379e0236928ad1c4758511721ca7e4b75ad3e6b857454759239633395997"}
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.854612 4816 scope.go:117] "RemoveContainer" containerID="f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.854623 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-llgb5"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.882256 4816 scope.go:117] "RemoveContainer" containerID="feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.900471 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"]
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.905086 4816 scope.go:117] "RemoveContainer" containerID="60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.909510 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-llgb5"]
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.945448 4816 scope.go:117] "RemoveContainer" containerID="f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0"
Feb 16 14:37:28 crc kubenswrapper[4816]: E0216 14:37:28.945951 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0\": container with ID starting with f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0 not found: ID does not exist" containerID="f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.945991 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0"} err="failed to get container status \"f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0\": rpc error: code = NotFound desc = could not find container \"f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0\": container with ID starting with f80fbcba7c9b9671d67394ed2f04c0c8c306907d0743fa2547cfee90e36f51a0 not found: ID does not exist"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.946012 4816 scope.go:117] "RemoveContainer" containerID="feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765"
Feb 16 14:37:28 crc kubenswrapper[4816]: E0216 14:37:28.946367 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765\": container with ID starting with feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765 not found: ID does not exist" containerID="feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.946389 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765"} err="failed to get container status \"feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765\": rpc error: code = NotFound desc = could not find container \"feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765\": container with ID starting with feb6e446b140d2a50472d45cd96a3a6ffd1c1a6f4d1d6022e345ed6727327765 not found: ID does not exist"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.946401 4816 scope.go:117] "RemoveContainer" containerID="60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13"
Feb 16 14:37:28 crc kubenswrapper[4816]: E0216 14:37:28.946853 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13\": container with ID starting with 60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13 not found: ID does not exist" containerID="60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13"
Feb 16 14:37:28 crc kubenswrapper[4816]: I0216 14:37:28.946914 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13"} err="failed to get container status \"60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13\": rpc error: code = NotFound desc = could not find container \"60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13\": container with ID starting with 60376042ee4aebf813f4387cded96f51ea17b078ffd4516000fbd6d0b9befd13 not found: ID does not exist"
Feb 16 14:37:29 crc kubenswrapper[4816]: I0216 14:37:29.259042 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lqxfs" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="registry-server" probeResult="failure" output=<
Feb 16 14:37:29 crc kubenswrapper[4816]: 	timeout: failed to connect service ":50051" within 1s
Feb 16 14:37:29 crc kubenswrapper[4816]: >
Feb 16 14:37:29 crc kubenswrapper[4816]: I0216 14:37:29.409237 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" path="/var/lib/kubelet/pods/48f39552-d9af-404d-8d9f-24ae6c0f8bd3/volumes"
Feb 16 14:37:38 crc kubenswrapper[4816]: I0216 14:37:38.262913 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:38 crc kubenswrapper[4816]: I0216 14:37:38.320643 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:38 crc kubenswrapper[4816]: I0216 14:37:38.672755 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lqxfs"]
Feb 16 14:37:39 crc kubenswrapper[4816]: I0216 14:37:39.958951 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lqxfs" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="registry-server" containerID="cri-o://4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c" gracePeriod=2
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.403868 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.415435 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-utilities\") pod \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") "
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.415579 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-catalog-content\") pod \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") "
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.415704 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6d8r\" (UniqueName: \"kubernetes.io/projected/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-kube-api-access-m6d8r\") pod \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\" (UID: \"ffbbf5dc-2e5c-4d98-9b14-f449e0085049\") "
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.416333 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-utilities" (OuterVolumeSpecName: "utilities") pod "ffbbf5dc-2e5c-4d98-9b14-f449e0085049" (UID: "ffbbf5dc-2e5c-4d98-9b14-f449e0085049"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.416917 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.432919 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-kube-api-access-m6d8r" (OuterVolumeSpecName: "kube-api-access-m6d8r") pod "ffbbf5dc-2e5c-4d98-9b14-f449e0085049" (UID: "ffbbf5dc-2e5c-4d98-9b14-f449e0085049"). InnerVolumeSpecName "kube-api-access-m6d8r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.517709 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6d8r\" (UniqueName: \"kubernetes.io/projected/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-kube-api-access-m6d8r\") on node \"crc\" DevicePath \"\""
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.562314 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ffbbf5dc-2e5c-4d98-9b14-f449e0085049" (UID: "ffbbf5dc-2e5c-4d98-9b14-f449e0085049"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.618523 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffbbf5dc-2e5c-4d98-9b14-f449e0085049-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.974536 4816 generic.go:334] "Generic (PLEG): container finished" podID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerID="4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c" exitCode=0
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.974598 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lqxfs" event={"ID":"ffbbf5dc-2e5c-4d98-9b14-f449e0085049","Type":"ContainerDied","Data":"4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c"}
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.974649 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lqxfs" event={"ID":"ffbbf5dc-2e5c-4d98-9b14-f449e0085049","Type":"ContainerDied","Data":"54a6f1add824f6b1658d5e1c18d871ffc790e43cccc3f170e20f3c26457a5606"}
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.974650 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lqxfs"
Feb 16 14:37:40 crc kubenswrapper[4816]: I0216 14:37:40.974695 4816 scope.go:117] "RemoveContainer" containerID="4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.020641 4816 scope.go:117] "RemoveContainer" containerID="e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.028933 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lqxfs"]
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.041274 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lqxfs"]
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.058871 4816 scope.go:117] "RemoveContainer" containerID="5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.107479 4816 scope.go:117] "RemoveContainer" containerID="4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c"
Feb 16 14:37:41 crc kubenswrapper[4816]: E0216 14:37:41.108141 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c\": container with ID starting with 4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c not found: ID does not exist" containerID="4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.108182 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c"} err="failed to get container status \"4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c\": rpc error: code = NotFound desc = could not find container \"4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c\": container with ID starting with 4f59595e720b5ef0d521310cd8377084e125e772938d0eefbf7c3f2361862a9c not found: ID does not exist"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.108209 4816 scope.go:117] "RemoveContainer" containerID="e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394"
Feb 16 14:37:41 crc kubenswrapper[4816]: E0216 14:37:41.108689 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394\": container with ID starting with e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394 not found: ID does not exist" containerID="e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.108720 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394"} err="failed to get container status \"e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394\": rpc error: code = NotFound desc = could not find container \"e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394\": container with ID starting with e178e59d46af3f15f4001375d2782e68a104d82a260846bb38441557ca19e394 not found: ID does not exist"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.108737 4816 scope.go:117] "RemoveContainer" containerID="5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15"
Feb 16 14:37:41 crc kubenswrapper[4816]: E0216 14:37:41.109021 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15\": container with ID starting with 5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15 not found: ID does not exist" containerID="5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.109051 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15"} err="failed to get container status \"5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15\": rpc error: code = NotFound desc = could not find container \"5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15\": container with ID starting with 5f100489911ae3d399ccbfa2293f8aa2577ba069d0c1e2911b7fc535137f3e15 not found: ID does not exist"
Feb 16 14:37:41 crc kubenswrapper[4816]: I0216 14:37:41.411175 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" path="/var/lib/kubelet/pods/ffbbf5dc-2e5c-4d98-9b14-f449e0085049/volumes"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.092446 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lt54b"]
Feb 16 14:37:51 crc kubenswrapper[4816]: E0216 14:37:51.093564 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="registry-server"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.093602 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="registry-server"
Feb 16 14:37:51 crc kubenswrapper[4816]: E0216 14:37:51.093621 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="extract-utilities"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.093630 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="extract-utilities"
Feb 16 14:37:51 crc kubenswrapper[4816]: E0216 14:37:51.093675 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="extract-content"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.093686 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="extract-content"
Feb 16 14:37:51 crc kubenswrapper[4816]: E0216 14:37:51.093705 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="extract-content"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.093720 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="extract-content"
Feb 16 14:37:51 crc kubenswrapper[4816]: E0216 14:37:51.093747 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="extract-utilities"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.093756 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="extract-utilities"
Feb 16 14:37:51 crc kubenswrapper[4816]: E0216 14:37:51.093764 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="registry-server"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.093770 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="registry-server"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.093976 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="48f39552-d9af-404d-8d9f-24ae6c0f8bd3" containerName="registry-server"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.094019 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffbbf5dc-2e5c-4d98-9b14-f449e0085049" containerName="registry-server"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.094797 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.100103 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-run\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.100238 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-log-ovn\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.100306 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-run-ovn\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.100364 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh8lh\" (UniqueName: \"kubernetes.io/projected/ded75e4e-2e4e-487c-a78d-1029edcba7e6-kube-api-access-xh8lh\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.100432 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ded75e4e-2e4e-487c-a78d-1029edcba7e6-scripts\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.100459 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.103697 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-v95mf"]
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.104531 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-7jhrc"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.105852 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.114613 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-v95mf"]
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.123066 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lt54b"]
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.247363 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-log-ovn\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.247437 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-run-ovn\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.247495 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh8lh\" (UniqueName: \"kubernetes.io/projected/ded75e4e-2e4e-487c-a78d-1029edcba7e6-kube-api-access-xh8lh\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.247548 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ded75e4e-2e4e-487c-a78d-1029edcba7e6-scripts\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.247605 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-run\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.247991 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-run-ovn\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.248063 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-run\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.249443 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ded75e4e-2e4e-487c-a78d-1029edcba7e6-var-log-ovn\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.255369 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ded75e4e-2e4e-487c-a78d-1029edcba7e6-scripts\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.273601 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh8lh\" (UniqueName: \"kubernetes.io/projected/ded75e4e-2e4e-487c-a78d-1029edcba7e6-kube-api-access-xh8lh\") pod \"ovn-controller-lt54b\" (UID: \"ded75e4e-2e4e-487c-a78d-1029edcba7e6\") " pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.351889 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-lib\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.351986 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k76kr\" (UniqueName: \"kubernetes.io/projected/9a78f714-cb33-4f68-a282-ab390b744153-kube-api-access-k76kr\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.352204 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-etc-ovs\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.352226 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-run\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.352255 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9a78f714-cb33-4f68-a282-ab390b744153-scripts\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.352291 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-log\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.418950 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.454310 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-log\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.454468 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-lib\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.454516 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k76kr\" (UniqueName: \"kubernetes.io/projected/9a78f714-cb33-4f68-a282-ab390b744153-kube-api-access-k76kr\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.454640 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-etc-ovs\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.454760 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-run\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.454786 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9a78f714-cb33-4f68-a282-ab390b744153-scripts\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.454884 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-log\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.455027 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-etc-ovs\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.455078 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-lib\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.455180 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9a78f714-cb33-4f68-a282-ab390b744153-var-run\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.457811 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9a78f714-cb33-4f68-a282-ab390b744153-scripts\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.479887 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k76kr\" (UniqueName: \"kubernetes.io/projected/9a78f714-cb33-4f68-a282-ab390b744153-kube-api-access-k76kr\") pod \"ovn-controller-ovs-v95mf\" (UID: \"9a78f714-cb33-4f68-a282-ab390b744153\") " pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.743243 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-v95mf"
Feb 16 14:37:51 crc kubenswrapper[4816]: I0216 14:37:51.876552 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lt54b"]
Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.130992 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b" event={"ID":"ded75e4e-2e4e-487c-a78d-1029edcba7e6","Type":"ContainerStarted","Data":"40fcf552b63739d179256cb2947fbb28033694811d8c04eaabf8d63228efdb4d"}
Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.532708 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-v95mf"]
Feb 16 14:37:52 crc kubenswrapper[4816]: W0216 14:37:52.535186 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a78f714_cb33_4f68_a282_ab390b744153.slice/crio-e7e211f27c617e92da02e5bf3b35a305a035f83b3d280bbb1e4dbb18d059f5c0 WatchSource:0}: Error finding container e7e211f27c617e92da02e5bf3b35a305a035f83b3d280bbb1e4dbb18d059f5c0: Status 404 returned error can't find the container with id e7e211f27c617e92da02e5bf3b35a305a035f83b3d280bbb1e4dbb18d059f5c0
Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.639711 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-tb2n8"]
Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.641347 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tb2n8"
Need to start a new one" pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.644635 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.671324 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tb2n8"] Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.783035 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-ovn-rundir\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.783449 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-ovs-rundir\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.783537 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-config\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.783708 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97b8h\" (UniqueName: \"kubernetes.io/projected/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-kube-api-access-97b8h\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.885313 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-config\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.885395 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97b8h\" (UniqueName: \"kubernetes.io/projected/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-kube-api-access-97b8h\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.885451 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-ovn-rundir\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.885569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-ovs-rundir\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " 
pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.885970 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-ovs-rundir\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.886006 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-ovn-rundir\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.886549 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-config\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.905332 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97b8h\" (UniqueName: \"kubernetes.io/projected/20fdd2c1-4a44-4d6f-ae14-f11d556f35f7-kube-api-access-97b8h\") pod \"ovn-controller-metrics-tb2n8\" (UID: \"20fdd2c1-4a44-4d6f-ae14-f11d556f35f7\") " pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:52 crc kubenswrapper[4816]: I0216 14:37:52.965044 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tb2n8" Feb 16 14:37:53 crc kubenswrapper[4816]: I0216 14:37:53.175178 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b" event={"ID":"ded75e4e-2e4e-487c-a78d-1029edcba7e6","Type":"ContainerStarted","Data":"bcf60fa82bc315c4effbf4cc8a6f5efa9811f7620e63378f3297d69e9a87a6f9"} Feb 16 14:37:53 crc kubenswrapper[4816]: I0216 14:37:53.175561 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-lt54b" Feb 16 14:37:53 crc kubenswrapper[4816]: I0216 14:37:53.185575 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v95mf" event={"ID":"9a78f714-cb33-4f68-a282-ab390b744153","Type":"ContainerStarted","Data":"e1519b83633fb84a2eb25927f5be29ab91e25b4a8e1588bf12dc9ac59beba5ad"} Feb 16 14:37:53 crc kubenswrapper[4816]: I0216 14:37:53.185625 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v95mf" event={"ID":"9a78f714-cb33-4f68-a282-ab390b744153","Type":"ContainerStarted","Data":"e7e211f27c617e92da02e5bf3b35a305a035f83b3d280bbb1e4dbb18d059f5c0"} Feb 16 14:37:53 crc kubenswrapper[4816]: I0216 14:37:53.225205 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-lt54b" podStartSLOduration=2.225163083 podStartE2EDuration="2.225163083s" podCreationTimestamp="2026-02-16 14:37:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:37:53.201603901 +0000 UTC m=+5672.528317649" watchObservedRunningTime="2026-02-16 14:37:53.225163083 +0000 UTC m=+5672.551876811" Feb 16 14:37:53 crc kubenswrapper[4816]: W0216 14:37:53.560963 4816 manager.go:1169] Failed to process watch event {EventType:0 
Feb 16 14:37:53 crc kubenswrapper[4816]: W0216 14:37:53.560963 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20fdd2c1_4a44_4d6f_ae14_f11d556f35f7.slice/crio-356633ec4139d5dcaa0dfdf7dfc249163ae9fb9624517d52f424970ba9198ca9 WatchSource:0}: Error finding container 356633ec4139d5dcaa0dfdf7dfc249163ae9fb9624517d52f424970ba9198ca9: Status 404 returned error can't find the container with id 356633ec4139d5dcaa0dfdf7dfc249163ae9fb9624517d52f424970ba9198ca9
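These "Failed to process watch event ... Status 404" warnings, here and elsewhere in the log, look like the usual benign race: cAdvisor notices a new crio-<id> cgroup before CRI-O can report the container, and the same ID appears moments later in a ContainerStarted event. A quick cross-check, assuming the log is split one entry per line as here and saved as kubelet.log (hypothetical filename):

    import re

    ids_404, ids_started = set(), set()
    for line in open("kubelet.log"):
        if "Failed to process watch event" in line:
            m = re.search(r"crio-([0-9a-f]{64})", line)
            if m:
                ids_404.add(m.group(1))
        m = re.search(r'"Type":"ContainerStarted","Data":"([0-9a-f]{64})"', line)
        if m:
            ids_started.add(m.group(1))
    print(ids_404 - ids_started)  # empty set -> every 404'd container did start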
event={"ID":"9a78f714-cb33-4f68-a282-ab390b744153","Type":"ContainerStarted","Data":"3bee554d0c0d044c9886a92572ea49a099b3911d75589031ae19a573d19bea83"} Feb 16 14:37:55 crc kubenswrapper[4816]: I0216 14:37:55.250466 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-v95mf" podStartSLOduration=4.250439315 podStartE2EDuration="4.250439315s" podCreationTimestamp="2026-02-16 14:37:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:37:55.243706262 +0000 UTC m=+5674.570420000" watchObservedRunningTime="2026-02-16 14:37:55.250439315 +0000 UTC m=+5674.577153043" Feb 16 14:37:55 crc kubenswrapper[4816]: I0216 14:37:55.411633 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da8a148e-702c-4553-a7b5-29268d0d3645" path="/var/lib/kubelet/pods/da8a148e-702c-4553-a7b5-29268d0d3645/volumes" Feb 16 14:37:55 crc kubenswrapper[4816]: I0216 14:37:55.412593 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe106007-900d-482f-8029-155ca0185242" path="/var/lib/kubelet/pods/fe106007-900d-482f-8029-155ca0185242/volumes" Feb 16 14:37:56 crc kubenswrapper[4816]: I0216 14:37:56.234296 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-v95mf" Feb 16 14:37:56 crc kubenswrapper[4816]: I0216 14:37:56.234345 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-v95mf" Feb 16 14:38:01 crc kubenswrapper[4816]: I0216 14:38:01.045410 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-7cfgx"] Feb 16 14:38:01 crc kubenswrapper[4816]: I0216 14:38:01.058164 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-7cfgx"] Feb 16 14:38:01 crc kubenswrapper[4816]: I0216 14:38:01.412061 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b29b2b2-90b0-40c4-bcac-6b94ee2af701" path="/var/lib/kubelet/pods/1b29b2b2-90b0-40c4-bcac-6b94ee2af701/volumes" Feb 16 14:38:14 crc kubenswrapper[4816]: I0216 14:38:14.033175 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lq2b6"] Feb 16 14:38:14 crc kubenswrapper[4816]: I0216 14:38:14.044166 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-lq2b6"] Feb 16 14:38:15 crc kubenswrapper[4816]: I0216 14:38:15.413385 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2" path="/var/lib/kubelet/pods/d2bcf70e-7911-4f9a-bf71-ae6b8b8e43b2/volumes" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.491004 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-72x8m"] Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.493429 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.501059 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-72x8m"] Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.686035 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56kvd\" (UniqueName: \"kubernetes.io/projected/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-kube-api-access-56kvd\") pod \"octavia-db-create-72x8m\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.686233 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-operator-scripts\") pod \"octavia-db-create-72x8m\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.787558 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-operator-scripts\") pod \"octavia-db-create-72x8m\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.787717 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56kvd\" (UniqueName: \"kubernetes.io/projected/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-kube-api-access-56kvd\") pod \"octavia-db-create-72x8m\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.788424 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-operator-scripts\") pod \"octavia-db-create-72x8m\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.809597 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56kvd\" (UniqueName: \"kubernetes.io/projected/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-kube-api-access-56kvd\") pod \"octavia-db-create-72x8m\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:17 crc kubenswrapper[4816]: I0216 14:38:17.814485 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.289765 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-72x8m"] Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.455265 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-72x8m" event={"ID":"6179ceaa-76f9-4804-80b5-27ffb9ee36c7","Type":"ContainerStarted","Data":"2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40"} Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.715718 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-af00-account-create-update-gwzwc"] Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.717217 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.719495 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.724687 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-af00-account-create-update-gwzwc"] Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.809040 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b25217-e3df-4ded-b750-c096a13f5577-operator-scripts\") pod \"octavia-af00-account-create-update-gwzwc\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.809126 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c29gk\" (UniqueName: \"kubernetes.io/projected/61b25217-e3df-4ded-b750-c096a13f5577-kube-api-access-c29gk\") pod \"octavia-af00-account-create-update-gwzwc\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.910494 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b25217-e3df-4ded-b750-c096a13f5577-operator-scripts\") pod \"octavia-af00-account-create-update-gwzwc\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.910856 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c29gk\" (UniqueName: \"kubernetes.io/projected/61b25217-e3df-4ded-b750-c096a13f5577-kube-api-access-c29gk\") pod \"octavia-af00-account-create-update-gwzwc\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.911444 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b25217-e3df-4ded-b750-c096a13f5577-operator-scripts\") pod \"octavia-af00-account-create-update-gwzwc\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:18 crc kubenswrapper[4816]: I0216 14:38:18.937637 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c29gk\" (UniqueName: \"kubernetes.io/projected/61b25217-e3df-4ded-b750-c096a13f5577-kube-api-access-c29gk\") pod \"octavia-af00-account-create-update-gwzwc\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:19 crc kubenswrapper[4816]: I0216 14:38:19.038437 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:19 crc kubenswrapper[4816]: I0216 14:38:19.467431 4816 generic.go:334] "Generic (PLEG): container finished" podID="6179ceaa-76f9-4804-80b5-27ffb9ee36c7" containerID="6bbc01c1daa9958fffaf66c03827757762ff2aea99dd476edc8410a8e20802b3" exitCode=0 Feb 16 14:38:19 crc kubenswrapper[4816]: I0216 14:38:19.467612 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-72x8m" event={"ID":"6179ceaa-76f9-4804-80b5-27ffb9ee36c7","Type":"ContainerDied","Data":"6bbc01c1daa9958fffaf66c03827757762ff2aea99dd476edc8410a8e20802b3"} Feb 16 14:38:19 crc kubenswrapper[4816]: I0216 14:38:19.511489 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-af00-account-create-update-gwzwc"] Feb 16 14:38:19 crc kubenswrapper[4816]: W0216 14:38:19.513646 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61b25217_e3df_4ded_b750_c096a13f5577.slice/crio-7e044cd891ee87b178cd1daf4a09589fde3cb631caff24c6c1ef10e33a431b66 WatchSource:0}: Error finding container 7e044cd891ee87b178cd1daf4a09589fde3cb631caff24c6c1ef10e33a431b66: Status 404 returned error can't find the container with id 7e044cd891ee87b178cd1daf4a09589fde3cb631caff24c6c1ef10e33a431b66 Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.478163 4816 generic.go:334] "Generic (PLEG): container finished" podID="61b25217-e3df-4ded-b750-c096a13f5577" containerID="f279ca8b948a2178045ef5a1f80201d51396c52779181569d16bbe4ae5ad2540" exitCode=0 Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.478387 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-af00-account-create-update-gwzwc" event={"ID":"61b25217-e3df-4ded-b750-c096a13f5577","Type":"ContainerDied","Data":"f279ca8b948a2178045ef5a1f80201d51396c52779181569d16bbe4ae5ad2540"} Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.478729 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-af00-account-create-update-gwzwc" event={"ID":"61b25217-e3df-4ded-b750-c096a13f5577","Type":"ContainerStarted","Data":"7e044cd891ee87b178cd1daf4a09589fde3cb631caff24c6c1ef10e33a431b66"} Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.823163 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.951768 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-operator-scripts\") pod \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.951971 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56kvd\" (UniqueName: \"kubernetes.io/projected/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-kube-api-access-56kvd\") pod \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\" (UID: \"6179ceaa-76f9-4804-80b5-27ffb9ee36c7\") " Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.952810 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6179ceaa-76f9-4804-80b5-27ffb9ee36c7" (UID: "6179ceaa-76f9-4804-80b5-27ffb9ee36c7"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:20 crc kubenswrapper[4816]: I0216 14:38:20.956927 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-kube-api-access-56kvd" (OuterVolumeSpecName: "kube-api-access-56kvd") pod "6179ceaa-76f9-4804-80b5-27ffb9ee36c7" (UID: "6179ceaa-76f9-4804-80b5-27ffb9ee36c7"). InnerVolumeSpecName "kube-api-access-56kvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.055585 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.055984 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56kvd\" (UniqueName: \"kubernetes.io/projected/6179ceaa-76f9-4804-80b5-27ffb9ee36c7-kube-api-access-56kvd\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.488757 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-72x8m" event={"ID":"6179ceaa-76f9-4804-80b5-27ffb9ee36c7","Type":"ContainerDied","Data":"2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40"} Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.490074 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.488858 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-72x8m" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.868096 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.973057 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c29gk\" (UniqueName: \"kubernetes.io/projected/61b25217-e3df-4ded-b750-c096a13f5577-kube-api-access-c29gk\") pod \"61b25217-e3df-4ded-b750-c096a13f5577\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.973384 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b25217-e3df-4ded-b750-c096a13f5577-operator-scripts\") pod \"61b25217-e3df-4ded-b750-c096a13f5577\" (UID: \"61b25217-e3df-4ded-b750-c096a13f5577\") " Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.973833 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61b25217-e3df-4ded-b750-c096a13f5577-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61b25217-e3df-4ded-b750-c096a13f5577" (UID: "61b25217-e3df-4ded-b750-c096a13f5577"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.974210 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61b25217-e3df-4ded-b750-c096a13f5577-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:21 crc kubenswrapper[4816]: I0216 14:38:21.978835 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61b25217-e3df-4ded-b750-c096a13f5577-kube-api-access-c29gk" (OuterVolumeSpecName: "kube-api-access-c29gk") pod "61b25217-e3df-4ded-b750-c096a13f5577" (UID: "61b25217-e3df-4ded-b750-c096a13f5577"). InnerVolumeSpecName "kube-api-access-c29gk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:38:22 crc kubenswrapper[4816]: I0216 14:38:22.076109 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c29gk\" (UniqueName: \"kubernetes.io/projected/61b25217-e3df-4ded-b750-c096a13f5577-kube-api-access-c29gk\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:22 crc kubenswrapper[4816]: I0216 14:38:22.502668 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-af00-account-create-update-gwzwc" event={"ID":"61b25217-e3df-4ded-b750-c096a13f5577","Type":"ContainerDied","Data":"7e044cd891ee87b178cd1daf4a09589fde3cb631caff24c6c1ef10e33a431b66"} Feb 16 14:38:22 crc kubenswrapper[4816]: I0216 14:38:22.502722 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e044cd891ee87b178cd1daf4a09589fde3cb631caff24c6c1ef10e33a431b66" Feb 16 14:38:22 crc kubenswrapper[4816]: I0216 14:38:22.502806 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-af00-account-create-update-gwzwc" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.062682 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-d954w"] Feb 16 14:38:24 crc kubenswrapper[4816]: E0216 14:38:24.063504 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6179ceaa-76f9-4804-80b5-27ffb9ee36c7" containerName="mariadb-database-create" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.063525 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6179ceaa-76f9-4804-80b5-27ffb9ee36c7" containerName="mariadb-database-create" Feb 16 14:38:24 crc kubenswrapper[4816]: E0216 14:38:24.063553 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61b25217-e3df-4ded-b750-c096a13f5577" containerName="mariadb-account-create-update" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.063559 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="61b25217-e3df-4ded-b750-c096a13f5577" containerName="mariadb-account-create-update" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.063769 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="61b25217-e3df-4ded-b750-c096a13f5577" containerName="mariadb-account-create-update" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.063784 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6179ceaa-76f9-4804-80b5-27ffb9ee36c7" containerName="mariadb-database-create" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.064404 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.073670 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-d954w"] Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.213255 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdqlw\" (UniqueName: \"kubernetes.io/projected/58a861c3-95d2-47a1-a7b6-900d06b15b69-kube-api-access-pdqlw\") pod \"octavia-persistence-db-create-d954w\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.213339 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a861c3-95d2-47a1-a7b6-900d06b15b69-operator-scripts\") pod \"octavia-persistence-db-create-d954w\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.314746 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdqlw\" (UniqueName: \"kubernetes.io/projected/58a861c3-95d2-47a1-a7b6-900d06b15b69-kube-api-access-pdqlw\") pod \"octavia-persistence-db-create-d954w\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.314819 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a861c3-95d2-47a1-a7b6-900d06b15b69-operator-scripts\") pod \"octavia-persistence-db-create-d954w\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.315632 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a861c3-95d2-47a1-a7b6-900d06b15b69-operator-scripts\") pod \"octavia-persistence-db-create-d954w\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.334404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdqlw\" (UniqueName: \"kubernetes.io/projected/58a861c3-95d2-47a1-a7b6-900d06b15b69-kube-api-access-pdqlw\") pod \"octavia-persistence-db-create-d954w\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.384182 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.593080 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-8072-account-create-update-24blg"] Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.595486 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.597840 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.604778 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-8072-account-create-update-24blg"] Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.723588 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9115f53-1e62-4807-b164-bd11a880da8f-operator-scripts\") pod \"octavia-8072-account-create-update-24blg\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.723715 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vb69\" (UniqueName: \"kubernetes.io/projected/f9115f53-1e62-4807-b164-bd11a880da8f-kube-api-access-8vb69\") pod \"octavia-8072-account-create-update-24blg\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.825460 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9115f53-1e62-4807-b164-bd11a880da8f-operator-scripts\") pod \"octavia-8072-account-create-update-24blg\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.825715 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vb69\" (UniqueName: \"kubernetes.io/projected/f9115f53-1e62-4807-b164-bd11a880da8f-kube-api-access-8vb69\") pod \"octavia-8072-account-create-update-24blg\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.826359 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9115f53-1e62-4807-b164-bd11a880da8f-operator-scripts\") pod \"octavia-8072-account-create-update-24blg\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.843942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vb69\" (UniqueName: \"kubernetes.io/projected/f9115f53-1e62-4807-b164-bd11a880da8f-kube-api-access-8vb69\") pod \"octavia-8072-account-create-update-24blg\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.898445 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-d954w"] Feb 16 14:38:24 crc kubenswrapper[4816]: I0216 14:38:24.919252 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:25 crc kubenswrapper[4816]: I0216 14:38:25.431049 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-8072-account-create-update-24blg"] Feb 16 14:38:25 crc kubenswrapper[4816]: I0216 14:38:25.530186 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8072-account-create-update-24blg" event={"ID":"f9115f53-1e62-4807-b164-bd11a880da8f","Type":"ContainerStarted","Data":"fcd883bd34664b33da0147d5a450d13249bb3c3ba52cb394ee031cdba5c59505"} Feb 16 14:38:25 crc kubenswrapper[4816]: I0216 14:38:25.532425 4816 generic.go:334] "Generic (PLEG): container finished" podID="58a861c3-95d2-47a1-a7b6-900d06b15b69" containerID="a71981ce4c0a582e96123fa0180da803976bef32297bd0ac457e5582fb143729" exitCode=0 Feb 16 14:38:25 crc kubenswrapper[4816]: I0216 14:38:25.532458 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-d954w" event={"ID":"58a861c3-95d2-47a1-a7b6-900d06b15b69","Type":"ContainerDied","Data":"a71981ce4c0a582e96123fa0180da803976bef32297bd0ac457e5582fb143729"} Feb 16 14:38:25 crc kubenswrapper[4816]: I0216 14:38:25.532475 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-d954w" event={"ID":"58a861c3-95d2-47a1-a7b6-900d06b15b69","Type":"ContainerStarted","Data":"a4522966a73256ac7c5bf9fd5ddd704d1dbecbf239882f4f2b0e7f54388a2fa7"} Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.466049 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-lt54b" podUID="ded75e4e-2e4e-487c-a78d-1029edcba7e6" containerName="ovn-controller" probeResult="failure" output=< Feb 16 14:38:26 crc kubenswrapper[4816]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 16 14:38:26 crc kubenswrapper[4816]: > Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.541899 4816 generic.go:334] "Generic (PLEG): container finished" podID="f9115f53-1e62-4807-b164-bd11a880da8f" containerID="8c4f450e9fc4402d80e6d99c33dc723507a9a4ba7ce445dc2ca6d3e71c41e13a" exitCode=0 Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.541957 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8072-account-create-update-24blg" event={"ID":"f9115f53-1e62-4807-b164-bd11a880da8f","Type":"ContainerDied","Data":"8c4f450e9fc4402d80e6d99c33dc723507a9a4ba7ce445dc2ca6d3e71c41e13a"} Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.799288 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-v95mf" Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.799882 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-v95mf" Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.938876 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.996563 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lt54b-config-s9r8f"] Feb 16 14:38:26 crc kubenswrapper[4816]: E0216 14:38:26.996928 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58a861c3-95d2-47a1-a7b6-900d06b15b69" containerName="mariadb-database-create" Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.996945 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="58a861c3-95d2-47a1-a7b6-900d06b15b69" containerName="mariadb-database-create" Feb 16 14:38:26 crc kubenswrapper[4816]: I0216 14:38:26.997132 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="58a861c3-95d2-47a1-a7b6-900d06b15b69" containerName="mariadb-database-create" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.002846 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.005983 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.021056 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lt54b-config-s9r8f"] Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.067480 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a861c3-95d2-47a1-a7b6-900d06b15b69-operator-scripts\") pod \"58a861c3-95d2-47a1-a7b6-900d06b15b69\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.067526 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdqlw\" (UniqueName: \"kubernetes.io/projected/58a861c3-95d2-47a1-a7b6-900d06b15b69-kube-api-access-pdqlw\") pod \"58a861c3-95d2-47a1-a7b6-900d06b15b69\" (UID: \"58a861c3-95d2-47a1-a7b6-900d06b15b69\") " Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.068166 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58a861c3-95d2-47a1-a7b6-900d06b15b69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "58a861c3-95d2-47a1-a7b6-900d06b15b69" (UID: "58a861c3-95d2-47a1-a7b6-900d06b15b69"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.069327 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a861c3-95d2-47a1-a7b6-900d06b15b69-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.101936 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58a861c3-95d2-47a1-a7b6-900d06b15b69-kube-api-access-pdqlw" (OuterVolumeSpecName: "kube-api-access-pdqlw") pod "58a861c3-95d2-47a1-a7b6-900d06b15b69" (UID: "58a861c3-95d2-47a1-a7b6-900d06b15b69"). InnerVolumeSpecName "kube-api-access-pdqlw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.170544 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-scripts\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.170614 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run-ovn\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.170688 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-log-ovn\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.170760 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.170789 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-additional-scripts\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.170909 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w52p4\" (UniqueName: \"kubernetes.io/projected/54dc0080-f0d2-4b78-92b0-78ef77225bb1-kube-api-access-w52p4\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.170973 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdqlw\" (UniqueName: \"kubernetes.io/projected/58a861c3-95d2-47a1-a7b6-900d06b15b69-kube-api-access-pdqlw\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273009 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-scripts\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273060 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run-ovn\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") 
" pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273100 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-log-ovn\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273181 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273239 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-additional-scripts\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273471 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273471 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-log-ovn\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273526 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run-ovn\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.273752 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w52p4\" (UniqueName: \"kubernetes.io/projected/54dc0080-f0d2-4b78-92b0-78ef77225bb1-kube-api-access-w52p4\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.274380 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-additional-scripts\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.275069 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-scripts\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " 
pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.294682 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w52p4\" (UniqueName: \"kubernetes.io/projected/54dc0080-f0d2-4b78-92b0-78ef77225bb1-kube-api-access-w52p4\") pod \"ovn-controller-lt54b-config-s9r8f\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.323076 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.570716 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-d954w" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.570746 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-d954w" event={"ID":"58a861c3-95d2-47a1-a7b6-900d06b15b69","Type":"ContainerDied","Data":"a4522966a73256ac7c5bf9fd5ddd704d1dbecbf239882f4f2b0e7f54388a2fa7"} Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.571510 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4522966a73256ac7c5bf9fd5ddd704d1dbecbf239882f4f2b0e7f54388a2fa7" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.775252 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lt54b-config-s9r8f"] Feb 16 14:38:27 crc kubenswrapper[4816]: W0216 14:38:27.784880 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54dc0080_f0d2_4b78_92b0_78ef77225bb1.slice/crio-f6f8b3e35321c0b2d1ff3977918542b81da4ed2b0dced6424d79a474b9fa6eac WatchSource:0}: Error finding container f6f8b3e35321c0b2d1ff3977918542b81da4ed2b0dced6424d79a474b9fa6eac: Status 404 returned error can't find the container with id f6f8b3e35321c0b2d1ff3977918542b81da4ed2b0dced6424d79a474b9fa6eac Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.834580 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.989351 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vb69\" (UniqueName: \"kubernetes.io/projected/f9115f53-1e62-4807-b164-bd11a880da8f-kube-api-access-8vb69\") pod \"f9115f53-1e62-4807-b164-bd11a880da8f\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.989780 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9115f53-1e62-4807-b164-bd11a880da8f-operator-scripts\") pod \"f9115f53-1e62-4807-b164-bd11a880da8f\" (UID: \"f9115f53-1e62-4807-b164-bd11a880da8f\") " Feb 16 14:38:27 crc kubenswrapper[4816]: I0216 14:38:27.991051 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9115f53-1e62-4807-b164-bd11a880da8f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f9115f53-1e62-4807-b164-bd11a880da8f" (UID: "f9115f53-1e62-4807-b164-bd11a880da8f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.002975 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9115f53-1e62-4807-b164-bd11a880da8f-kube-api-access-8vb69" (OuterVolumeSpecName: "kube-api-access-8vb69") pod "f9115f53-1e62-4807-b164-bd11a880da8f" (UID: "f9115f53-1e62-4807-b164-bd11a880da8f"). InnerVolumeSpecName "kube-api-access-8vb69". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.092794 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vb69\" (UniqueName: \"kubernetes.io/projected/f9115f53-1e62-4807-b164-bd11a880da8f-kube-api-access-8vb69\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.092851 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9115f53-1e62-4807-b164-bd11a880da8f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.357129 4816 scope.go:117] "RemoveContainer" containerID="439a80fcf9d208a6d9421bfdb52c81440d46dc5b6f630aae87abe43caaa71e83" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.403286 4816 scope.go:117] "RemoveContainer" containerID="18660c3f9adf0ee465da93e3bdabea8404f0096997994bc6f39d26d029f0b5fa" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.459154 4816 scope.go:117] "RemoveContainer" containerID="29dd50f0854ebcc63c2e1528c48488fcb5f16d73b9a034d8cac0d75db7e04b24" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.505238 4816 scope.go:117] "RemoveContainer" containerID="24ce500bee6ff0ad40be30416fc9e59716e8847db51a760725983c4ac75ab433" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.588620 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8072-account-create-update-24blg" event={"ID":"f9115f53-1e62-4807-b164-bd11a880da8f","Type":"ContainerDied","Data":"fcd883bd34664b33da0147d5a450d13249bb3c3ba52cb394ee031cdba5c59505"} Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.588668 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-8072-account-create-update-24blg" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.588680 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcd883bd34664b33da0147d5a450d13249bb3c3ba52cb394ee031cdba5c59505" Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.590967 4816 generic.go:334] "Generic (PLEG): container finished" podID="54dc0080-f0d2-4b78-92b0-78ef77225bb1" containerID="a7120fc8ec9a659822ccd123c0191d630554675320e62da2847dcbcf8b44e822" exitCode=0 Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.591041 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b-config-s9r8f" event={"ID":"54dc0080-f0d2-4b78-92b0-78ef77225bb1","Type":"ContainerDied","Data":"a7120fc8ec9a659822ccd123c0191d630554675320e62da2847dcbcf8b44e822"} Feb 16 14:38:28 crc kubenswrapper[4816]: I0216 14:38:28.591070 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b-config-s9r8f" event={"ID":"54dc0080-f0d2-4b78-92b0-78ef77225bb1","Type":"ContainerStarted","Data":"f6f8b3e35321c0b2d1ff3977918542b81da4ed2b0dced6424d79a474b9fa6eac"} Feb 16 14:38:28 crc kubenswrapper[4816]: E0216 14:38:28.904876 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice/crio-2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice\": RecentStats: unable to find data in memory cache]" Feb 16 14:38:29 crc kubenswrapper[4816]: I0216 14:38:29.931056 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.029633 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run-ovn\") pod \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.030189 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w52p4\" (UniqueName: \"kubernetes.io/projected/54dc0080-f0d2-4b78-92b0-78ef77225bb1-kube-api-access-w52p4\") pod \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.029728 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "54dc0080-f0d2-4b78-92b0-78ef77225bb1" (UID: "54dc0080-f0d2-4b78-92b0-78ef77225bb1"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.030388 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-scripts\") pod \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.030475 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-additional-scripts\") pod \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.030553 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run\") pod \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.030678 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-log-ovn\") pod \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\" (UID: \"54dc0080-f0d2-4b78-92b0-78ef77225bb1\") " Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031404 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "54dc0080-f0d2-4b78-92b0-78ef77225bb1" (UID: "54dc0080-f0d2-4b78-92b0-78ef77225bb1"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031436 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run" (OuterVolumeSpecName: "var-run") pod "54dc0080-f0d2-4b78-92b0-78ef77225bb1" (UID: "54dc0080-f0d2-4b78-92b0-78ef77225bb1"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031453 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "54dc0080-f0d2-4b78-92b0-78ef77225bb1" (UID: "54dc0080-f0d2-4b78-92b0-78ef77225bb1"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031673 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-scripts" (OuterVolumeSpecName: "scripts") pod "54dc0080-f0d2-4b78-92b0-78ef77225bb1" (UID: "54dc0080-f0d2-4b78-92b0-78ef77225bb1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031684 4816 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031824 4816 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031890 4816 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-run\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.031965 4816 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54dc0080-f0d2-4b78-92b0-78ef77225bb1-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.037412 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54dc0080-f0d2-4b78-92b0-78ef77225bb1-kube-api-access-w52p4" (OuterVolumeSpecName: "kube-api-access-w52p4") pod "54dc0080-f0d2-4b78-92b0-78ef77225bb1" (UID: "54dc0080-f0d2-4b78-92b0-78ef77225bb1"). InnerVolumeSpecName "kube-api-access-w52p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.134268 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w52p4\" (UniqueName: \"kubernetes.io/projected/54dc0080-f0d2-4b78-92b0-78ef77225bb1-kube-api-access-w52p4\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.134327 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54dc0080-f0d2-4b78-92b0-78ef77225bb1-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.611103 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b-config-s9r8f" event={"ID":"54dc0080-f0d2-4b78-92b0-78ef77225bb1","Type":"ContainerDied","Data":"f6f8b3e35321c0b2d1ff3977918542b81da4ed2b0dced6424d79a474b9fa6eac"} Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.611148 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lt54b-config-s9r8f" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.611159 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6f8b3e35321c0b2d1ff3977918542b81da4ed2b0dced6424d79a474b9fa6eac" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.753582 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-7859468b99-2chct"] Feb 16 14:38:30 crc kubenswrapper[4816]: E0216 14:38:30.754835 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54dc0080-f0d2-4b78-92b0-78ef77225bb1" containerName="ovn-config" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.754869 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="54dc0080-f0d2-4b78-92b0-78ef77225bb1" containerName="ovn-config" Feb 16 14:38:30 crc kubenswrapper[4816]: E0216 14:38:30.754909 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9115f53-1e62-4807-b164-bd11a880da8f" containerName="mariadb-account-create-update" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.754919 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9115f53-1e62-4807-b164-bd11a880da8f" containerName="mariadb-account-create-update" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.755193 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="54dc0080-f0d2-4b78-92b0-78ef77225bb1" containerName="ovn-config" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.755244 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9115f53-1e62-4807-b164-bd11a880da8f" containerName="mariadb-account-create-update" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.760306 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.762816 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-64mxt" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.763070 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.768466 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.769144 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-7859468b99-2chct"] Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.846853 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-combined-ca-bundle\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.846928 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3f4eb8fe-9b0e-440f-9013-9af01736386f-config-data-merged\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.847060 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-scripts\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.847161 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/3f4eb8fe-9b0e-440f-9013-9af01736386f-octavia-run\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.847207 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-config-data\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.948719 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/3f4eb8fe-9b0e-440f-9013-9af01736386f-octavia-run\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.948780 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-config-data\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc 
kubenswrapper[4816]: I0216 14:38:30.948809 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-combined-ca-bundle\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.948857 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3f4eb8fe-9b0e-440f-9013-9af01736386f-config-data-merged\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.948971 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-scripts\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.949120 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/3f4eb8fe-9b0e-440f-9013-9af01736386f-octavia-run\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.949767 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3f4eb8fe-9b0e-440f-9013-9af01736386f-config-data-merged\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.952738 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-scripts\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.952859 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-combined-ca-bundle\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:30 crc kubenswrapper[4816]: I0216 14:38:30.953607 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f4eb8fe-9b0e-440f-9013-9af01736386f-config-data\") pod \"octavia-api-7859468b99-2chct\" (UID: \"3f4eb8fe-9b0e-440f-9013-9af01736386f\") " pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.051056 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lt54b-config-s9r8f"] Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.076995 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lt54b-config-s9r8f"] Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.080068 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.140123 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lt54b-config-lbgh6"] Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.144012 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.147959 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.164206 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lt54b-config-lbgh6"] Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.257786 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run-ovn\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.257855 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.257903 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8cwt\" (UniqueName: \"kubernetes.io/projected/abafa473-8d18-4348-83c5-e74f576a35d8-kube-api-access-g8cwt\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.257941 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-additional-scripts\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.258017 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-scripts\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.258094 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-log-ovn\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.366205 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-scripts\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: 
\"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.366304 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-log-ovn\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.366335 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run-ovn\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.366359 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.366387 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8cwt\" (UniqueName: \"kubernetes.io/projected/abafa473-8d18-4348-83c5-e74f576a35d8-kube-api-access-g8cwt\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.366417 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-additional-scripts\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.367560 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-additional-scripts\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.368129 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-log-ovn\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.368179 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run-ovn\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.368214 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: 
\"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.368819 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-scripts\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.386858 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8cwt\" (UniqueName: \"kubernetes.io/projected/abafa473-8d18-4348-83c5-e74f576a35d8-kube-api-access-g8cwt\") pod \"ovn-controller-lt54b-config-lbgh6\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.412587 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54dc0080-f0d2-4b78-92b0-78ef77225bb1" path="/var/lib/kubelet/pods/54dc0080-f0d2-4b78-92b0-78ef77225bb1/volumes" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.477267 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-lt54b" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.566204 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:31 crc kubenswrapper[4816]: I0216 14:38:31.734006 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-7859468b99-2chct"] Feb 16 14:38:32 crc kubenswrapper[4816]: I0216 14:38:32.158112 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lt54b-config-lbgh6"] Feb 16 14:38:32 crc kubenswrapper[4816]: I0216 14:38:32.635137 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7859468b99-2chct" event={"ID":"3f4eb8fe-9b0e-440f-9013-9af01736386f","Type":"ContainerStarted","Data":"b02e82ff45ae589de7e558f486683cdb019c3f1d4dd53bf45ef9a275863d0983"} Feb 16 14:38:32 crc kubenswrapper[4816]: I0216 14:38:32.637531 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b-config-lbgh6" event={"ID":"abafa473-8d18-4348-83c5-e74f576a35d8","Type":"ContainerStarted","Data":"3d2aca137654cf47646b404af68c7e5bb03a93e9985f0b52a42c885b98fc1c31"} Feb 16 14:38:32 crc kubenswrapper[4816]: I0216 14:38:32.637570 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b-config-lbgh6" event={"ID":"abafa473-8d18-4348-83c5-e74f576a35d8","Type":"ContainerStarted","Data":"3181d6549d1b770d0d52c25e7d656ff172d2498edb6dd97c300d8e526c2c34ef"} Feb 16 14:38:32 crc kubenswrapper[4816]: I0216 14:38:32.664471 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-lt54b-config-lbgh6" podStartSLOduration=1.664447573 podStartE2EDuration="1.664447573s" podCreationTimestamp="2026-02-16 14:38:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:38:32.655865689 +0000 UTC m=+5711.982579417" watchObservedRunningTime="2026-02-16 14:38:32.664447573 +0000 UTC m=+5711.991161311" Feb 16 14:38:33 crc kubenswrapper[4816]: I0216 14:38:33.649040 4816 generic.go:334] "Generic (PLEG): container finished" podID="abafa473-8d18-4348-83c5-e74f576a35d8" 
containerID="3d2aca137654cf47646b404af68c7e5bb03a93e9985f0b52a42c885b98fc1c31" exitCode=0 Feb 16 14:38:33 crc kubenswrapper[4816]: I0216 14:38:33.649373 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b-config-lbgh6" event={"ID":"abafa473-8d18-4348-83c5-e74f576a35d8","Type":"ContainerDied","Data":"3d2aca137654cf47646b404af68c7e5bb03a93e9985f0b52a42c885b98fc1c31"} Feb 16 14:38:39 crc kubenswrapper[4816]: E0216 14:38:39.155427 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice/crio-2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40\": RecentStats: unable to find data in memory cache]" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.449010 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.589974 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-log-ovn\") pod \"abafa473-8d18-4348-83c5-e74f576a35d8\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590364 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run-ovn\") pod \"abafa473-8d18-4348-83c5-e74f576a35d8\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590404 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8cwt\" (UniqueName: \"kubernetes.io/projected/abafa473-8d18-4348-83c5-e74f576a35d8-kube-api-access-g8cwt\") pod \"abafa473-8d18-4348-83c5-e74f576a35d8\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590513 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-scripts\") pod \"abafa473-8d18-4348-83c5-e74f576a35d8\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590569 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run\") pod \"abafa473-8d18-4348-83c5-e74f576a35d8\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590611 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-additional-scripts\") pod \"abafa473-8d18-4348-83c5-e74f576a35d8\" (UID: \"abafa473-8d18-4348-83c5-e74f576a35d8\") " Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590154 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod 
"abafa473-8d18-4348-83c5-e74f576a35d8" (UID: "abafa473-8d18-4348-83c5-e74f576a35d8"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590829 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "abafa473-8d18-4348-83c5-e74f576a35d8" (UID: "abafa473-8d18-4348-83c5-e74f576a35d8"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.590854 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run" (OuterVolumeSpecName: "var-run") pod "abafa473-8d18-4348-83c5-e74f576a35d8" (UID: "abafa473-8d18-4348-83c5-e74f576a35d8"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.591196 4816 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.591222 4816 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.591233 4816 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abafa473-8d18-4348-83c5-e74f576a35d8-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.591499 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "abafa473-8d18-4348-83c5-e74f576a35d8" (UID: "abafa473-8d18-4348-83c5-e74f576a35d8"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.591924 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-scripts" (OuterVolumeSpecName: "scripts") pod "abafa473-8d18-4348-83c5-e74f576a35d8" (UID: "abafa473-8d18-4348-83c5-e74f576a35d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.595485 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abafa473-8d18-4348-83c5-e74f576a35d8-kube-api-access-g8cwt" (OuterVolumeSpecName: "kube-api-access-g8cwt") pod "abafa473-8d18-4348-83c5-e74f576a35d8" (UID: "abafa473-8d18-4348-83c5-e74f576a35d8"). InnerVolumeSpecName "kube-api-access-g8cwt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.693140 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8cwt\" (UniqueName: \"kubernetes.io/projected/abafa473-8d18-4348-83c5-e74f576a35d8-kube-api-access-g8cwt\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.693172 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.693185 4816 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/abafa473-8d18-4348-83c5-e74f576a35d8-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.736865 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lt54b-config-lbgh6" event={"ID":"abafa473-8d18-4348-83c5-e74f576a35d8","Type":"ContainerDied","Data":"3181d6549d1b770d0d52c25e7d656ff172d2498edb6dd97c300d8e526c2c34ef"} Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.736895 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lt54b-config-lbgh6" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.736917 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3181d6549d1b770d0d52c25e7d656ff172d2498edb6dd97c300d8e526c2c34ef" Feb 16 14:38:41 crc kubenswrapper[4816]: I0216 14:38:41.738502 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7859468b99-2chct" event={"ID":"3f4eb8fe-9b0e-440f-9013-9af01736386f","Type":"ContainerStarted","Data":"b74a61b795be61d93753ac6a680832afdcf17cf341ee66a8c578ffc6a5393bb5"} Feb 16 14:38:42 crc kubenswrapper[4816]: I0216 14:38:42.535398 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lt54b-config-lbgh6"] Feb 16 14:38:42 crc kubenswrapper[4816]: I0216 14:38:42.548510 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lt54b-config-lbgh6"] Feb 16 14:38:42 crc kubenswrapper[4816]: I0216 14:38:42.749590 4816 generic.go:334] "Generic (PLEG): container finished" podID="3f4eb8fe-9b0e-440f-9013-9af01736386f" containerID="b74a61b795be61d93753ac6a680832afdcf17cf341ee66a8c578ffc6a5393bb5" exitCode=0 Feb 16 14:38:42 crc kubenswrapper[4816]: I0216 14:38:42.749646 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7859468b99-2chct" event={"ID":"3f4eb8fe-9b0e-440f-9013-9af01736386f","Type":"ContainerDied","Data":"b74a61b795be61d93753ac6a680832afdcf17cf341ee66a8c578ffc6a5393bb5"} Feb 16 14:38:43 crc kubenswrapper[4816]: I0216 14:38:43.410919 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abafa473-8d18-4348-83c5-e74f576a35d8" path="/var/lib/kubelet/pods/abafa473-8d18-4348-83c5-e74f576a35d8/volumes" Feb 16 14:38:43 crc kubenswrapper[4816]: I0216 14:38:43.760514 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7859468b99-2chct" event={"ID":"3f4eb8fe-9b0e-440f-9013-9af01736386f","Type":"ContainerStarted","Data":"759a56f68f0779e5647e53a9c958886778a3c8562711fc1c2a26f682e3dd9c4d"} Feb 16 14:38:43 crc kubenswrapper[4816]: I0216 14:38:43.760572 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-7859468b99-2chct" 
event={"ID":"3f4eb8fe-9b0e-440f-9013-9af01736386f","Type":"ContainerStarted","Data":"c03a7d65fa21aa1631b14665915b6ca9250478827b051ac27d23c274ee41b823"} Feb 16 14:38:43 crc kubenswrapper[4816]: I0216 14:38:43.760796 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:43 crc kubenswrapper[4816]: I0216 14:38:43.760826 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:38:43 crc kubenswrapper[4816]: I0216 14:38:43.790898 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-7859468b99-2chct" podStartSLOduration=4.145711351 podStartE2EDuration="13.790865209s" podCreationTimestamp="2026-02-16 14:38:30 +0000 UTC" firstStartedPulling="2026-02-16 14:38:31.74968247 +0000 UTC m=+5711.076396198" lastFinishedPulling="2026-02-16 14:38:41.394836328 +0000 UTC m=+5720.721550056" observedRunningTime="2026-02-16 14:38:43.784264678 +0000 UTC m=+5723.110978416" watchObservedRunningTime="2026-02-16 14:38:43.790865209 +0000 UTC m=+5723.117578937" Feb 16 14:38:49 crc kubenswrapper[4816]: E0216 14:38:49.443833 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice/crio-2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40\": RecentStats: unable to find data in memory cache]" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.468461 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-2brcl"] Feb 16 14:38:51 crc kubenswrapper[4816]: E0216 14:38:51.469307 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abafa473-8d18-4348-83c5-e74f576a35d8" containerName="ovn-config" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.469327 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="abafa473-8d18-4348-83c5-e74f576a35d8" containerName="ovn-config" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.469553 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="abafa473-8d18-4348-83c5-e74f576a35d8" containerName="ovn-config" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.470840 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.473224 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.473923 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.474348 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.478351 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-2brcl"] Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.577502 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/dda86147-89af-40b9-ad51-1d952a483747-hm-ports\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.577586 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dda86147-89af-40b9-ad51-1d952a483747-config-data\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.577723 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dda86147-89af-40b9-ad51-1d952a483747-scripts\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.577907 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/dda86147-89af-40b9-ad51-1d952a483747-config-data-merged\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.680112 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/dda86147-89af-40b9-ad51-1d952a483747-config-data-merged\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.680161 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/dda86147-89af-40b9-ad51-1d952a483747-hm-ports\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.680211 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dda86147-89af-40b9-ad51-1d952a483747-config-data\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.680288 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/dda86147-89af-40b9-ad51-1d952a483747-scripts\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.682437 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/dda86147-89af-40b9-ad51-1d952a483747-config-data-merged\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.683045 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/dda86147-89af-40b9-ad51-1d952a483747-hm-ports\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.688553 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dda86147-89af-40b9-ad51-1d952a483747-scripts\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.689389 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dda86147-89af-40b9-ad51-1d952a483747-config-data\") pod \"octavia-rsyslog-2brcl\" (UID: \"dda86147-89af-40b9-ad51-1d952a483747\") " pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:51 crc kubenswrapper[4816]: I0216 14:38:51.794255 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.108201 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-bz5wq"] Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.109929 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.117155 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.162513 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-bz5wq"] Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.190095 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/23ea82c9-c475-4eba-abd4-3af555a67d1b-amphora-image\") pod \"octavia-image-upload-59f8cff499-bz5wq\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.190152 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23ea82c9-c475-4eba-abd4-3af555a67d1b-httpd-config\") pod \"octavia-image-upload-59f8cff499-bz5wq\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.294490 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/23ea82c9-c475-4eba-abd4-3af555a67d1b-amphora-image\") pod \"octavia-image-upload-59f8cff499-bz5wq\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.294606 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23ea82c9-c475-4eba-abd4-3af555a67d1b-httpd-config\") pod \"octavia-image-upload-59f8cff499-bz5wq\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.295844 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/23ea82c9-c475-4eba-abd4-3af555a67d1b-amphora-image\") pod \"octavia-image-upload-59f8cff499-bz5wq\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.309241 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23ea82c9-c475-4eba-abd4-3af555a67d1b-httpd-config\") pod \"octavia-image-upload-59f8cff499-bz5wq\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.342497 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-2brcl"] Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.441561 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.550519 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-2brcl"] Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.834852 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-2brcl" event={"ID":"dda86147-89af-40b9-ad51-1d952a483747","Type":"ContainerStarted","Data":"d94c4ddf5eec7656d0d49449cad5e0a54a5f328ece41a9ff896378c720577c0e"} Feb 16 14:38:52 crc kubenswrapper[4816]: I0216 14:38:52.984710 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-bz5wq"] Feb 16 14:38:52 crc kubenswrapper[4816]: W0216 14:38:52.992954 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23ea82c9_c475_4eba_abd4_3af555a67d1b.slice/crio-2e2bfabf2297986e5f0e1f593907858f51e52af04df2ee2a1d63720ddd162ec0 WatchSource:0}: Error finding container 2e2bfabf2297986e5f0e1f593907858f51e52af04df2ee2a1d63720ddd162ec0: Status 404 returned error can't find the container with id 2e2bfabf2297986e5f0e1f593907858f51e52af04df2ee2a1d63720ddd162ec0 Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.502097 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-clbqk"] Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.504492 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.506637 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.513340 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-clbqk"] Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.620494 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data-merged\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.620555 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-scripts\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.620614 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.620650 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-combined-ca-bundle\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.722152 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data-merged\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.722208 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-scripts\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.722266 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.722301 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-combined-ca-bundle\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.723906 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data-merged\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.728087 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-combined-ca-bundle\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.729050 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.729967 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-scripts\") pod \"octavia-db-sync-clbqk\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.845022 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-clbqk" Feb 16 14:38:53 crc kubenswrapper[4816]: I0216 14:38:53.867401 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" event={"ID":"23ea82c9-c475-4eba-abd4-3af555a67d1b","Type":"ContainerStarted","Data":"2e2bfabf2297986e5f0e1f593907858f51e52af04df2ee2a1d63720ddd162ec0"} Feb 16 14:38:54 crc kubenswrapper[4816]: I0216 14:38:54.395272 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-clbqk"] Feb 16 14:38:54 crc kubenswrapper[4816]: W0216 14:38:54.704202 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74071f7f_b4f9_4695_9ab0_26a6b9a5027a.slice/crio-0ad3180855fe3294e957141e1b193bffbaa6ca73e85e7f2ae89f3f938db85e58 WatchSource:0}: Error finding container 0ad3180855fe3294e957141e1b193bffbaa6ca73e85e7f2ae89f3f938db85e58: Status 404 returned error can't find the container with id 0ad3180855fe3294e957141e1b193bffbaa6ca73e85e7f2ae89f3f938db85e58 Feb 16 14:38:54 crc kubenswrapper[4816]: I0216 14:38:54.919672 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-2brcl" event={"ID":"dda86147-89af-40b9-ad51-1d952a483747","Type":"ContainerStarted","Data":"5d04dfb04a430c25bfc080241f58723f4c42974f04b8a7b34da5b049fa1467ae"} Feb 16 14:38:54 crc kubenswrapper[4816]: I0216 14:38:54.924152 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-clbqk" event={"ID":"74071f7f-b4f9-4695-9ab0-26a6b9a5027a","Type":"ContainerStarted","Data":"0ad3180855fe3294e957141e1b193bffbaa6ca73e85e7f2ae89f3f938db85e58"} Feb 16 14:38:55 crc kubenswrapper[4816]: I0216 14:38:55.932383 4816 generic.go:334] "Generic (PLEG): container finished" podID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" containerID="48097c4a62170ad5dd59aebf8f967d1801dc907d4fbac89f146c3a51cf08a921" exitCode=0 Feb 16 14:38:55 crc kubenswrapper[4816]: I0216 14:38:55.932436 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-clbqk" event={"ID":"74071f7f-b4f9-4695-9ab0-26a6b9a5027a","Type":"ContainerDied","Data":"48097c4a62170ad5dd59aebf8f967d1801dc907d4fbac89f146c3a51cf08a921"} Feb 16 14:38:56 crc kubenswrapper[4816]: I0216 14:38:56.946608 4816 generic.go:334] "Generic (PLEG): container finished" podID="dda86147-89af-40b9-ad51-1d952a483747" containerID="5d04dfb04a430c25bfc080241f58723f4c42974f04b8a7b34da5b049fa1467ae" exitCode=0 Feb 16 14:38:56 crc kubenswrapper[4816]: I0216 14:38:56.946702 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-2brcl" event={"ID":"dda86147-89af-40b9-ad51-1d952a483747","Type":"ContainerDied","Data":"5d04dfb04a430c25bfc080241f58723f4c42974f04b8a7b34da5b049fa1467ae"} Feb 16 14:38:56 crc kubenswrapper[4816]: I0216 14:38:56.950180 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-clbqk" event={"ID":"74071f7f-b4f9-4695-9ab0-26a6b9a5027a","Type":"ContainerStarted","Data":"e6a31bdda7c1bebd32c3366596f020ee18fdd7694acfdcd04a4546ffae00cbf5"} Feb 16 14:38:56 crc kubenswrapper[4816]: I0216 14:38:56.986168 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-clbqk" podStartSLOduration=3.986148524 podStartE2EDuration="3.986148524s" podCreationTimestamp="2026-02-16 14:38:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-16 14:38:56.983124872 +0000 UTC m=+5736.309838600" watchObservedRunningTime="2026-02-16 14:38:56.986148524 +0000 UTC m=+5736.312862252" Feb 16 14:38:59 crc kubenswrapper[4816]: E0216 14:38:59.686742 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice/crio-2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40\": RecentStats: unable to find data in memory cache]" Feb 16 14:39:01 crc kubenswrapper[4816]: I0216 14:39:01.023925 4816 generic.go:334] "Generic (PLEG): container finished" podID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" containerID="e6a31bdda7c1bebd32c3366596f020ee18fdd7694acfdcd04a4546ffae00cbf5" exitCode=0 Feb 16 14:39:01 crc kubenswrapper[4816]: I0216 14:39:01.024486 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-clbqk" event={"ID":"74071f7f-b4f9-4695-9ab0-26a6b9a5027a","Type":"ContainerDied","Data":"e6a31bdda7c1bebd32c3366596f020ee18fdd7694acfdcd04a4546ffae00cbf5"} Feb 16 14:39:04 crc kubenswrapper[4816]: I0216 14:39:04.788527 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-clbqk" Feb 16 14:39:04 crc kubenswrapper[4816]: I0216 14:39:04.985464 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data-merged\") pod \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " Feb 16 14:39:04 crc kubenswrapper[4816]: I0216 14:39:04.985539 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-combined-ca-bundle\") pod \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " Feb 16 14:39:04 crc kubenswrapper[4816]: I0216 14:39:04.985584 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data\") pod \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " Feb 16 14:39:04 crc kubenswrapper[4816]: I0216 14:39:04.985754 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-scripts\") pod \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\" (UID: \"74071f7f-b4f9-4695-9ab0-26a6b9a5027a\") " Feb 16 14:39:04 crc kubenswrapper[4816]: I0216 14:39:04.993338 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-scripts" (OuterVolumeSpecName: "scripts") pod "74071f7f-b4f9-4695-9ab0-26a6b9a5027a" (UID: "74071f7f-b4f9-4695-9ab0-26a6b9a5027a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:39:04 crc kubenswrapper[4816]: I0216 14:39:04.997875 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data" (OuterVolumeSpecName: "config-data") pod "74071f7f-b4f9-4695-9ab0-26a6b9a5027a" (UID: "74071f7f-b4f9-4695-9ab0-26a6b9a5027a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.019200 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "74071f7f-b4f9-4695-9ab0-26a6b9a5027a" (UID: "74071f7f-b4f9-4695-9ab0-26a6b9a5027a"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.021447 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74071f7f-b4f9-4695-9ab0-26a6b9a5027a" (UID: "74071f7f-b4f9-4695-9ab0-26a6b9a5027a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.087629 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.087670 4816 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data-merged\") on node \"crc\" DevicePath \"\"" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.087682 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.087693 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74071f7f-b4f9-4695-9ab0-26a6b9a5027a-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.089902 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-clbqk" event={"ID":"74071f7f-b4f9-4695-9ab0-26a6b9a5027a","Type":"ContainerDied","Data":"0ad3180855fe3294e957141e1b193bffbaa6ca73e85e7f2ae89f3f938db85e58"} Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.089941 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ad3180855fe3294e957141e1b193bffbaa6ca73e85e7f2ae89f3f938db85e58" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.090009 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-clbqk" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.542774 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:39:05 crc kubenswrapper[4816]: I0216 14:39:05.745187 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-7859468b99-2chct" Feb 16 14:39:06 crc kubenswrapper[4816]: I0216 14:39:06.099810 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-2brcl" event={"ID":"dda86147-89af-40b9-ad51-1d952a483747","Type":"ContainerStarted","Data":"9d25511f5ad71baf0ce9c89279648b70190165a2bf6d321c28fa62af76a7611a"} Feb 16 14:39:06 crc kubenswrapper[4816]: I0216 14:39:06.100048 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:39:06 crc kubenswrapper[4816]: I0216 14:39:06.101181 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" event={"ID":"23ea82c9-c475-4eba-abd4-3af555a67d1b","Type":"ContainerStarted","Data":"c21b0c7e49dfad3a38e053c7e2af0473d7dfe30c0569f7d62d82f5b7342f6f96"} Feb 16 14:39:06 crc kubenswrapper[4816]: I0216 14:39:06.136344 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-2brcl" podStartSLOduration=2.166369573 podStartE2EDuration="15.136320205s" podCreationTimestamp="2026-02-16 14:38:51 +0000 UTC" firstStartedPulling="2026-02-16 14:38:52.365159867 +0000 UTC m=+5731.691873595" lastFinishedPulling="2026-02-16 14:39:05.335110499 +0000 UTC m=+5744.661824227" observedRunningTime="2026-02-16 14:39:06.122864058 +0000 UTC m=+5745.449577786" watchObservedRunningTime="2026-02-16 14:39:06.136320205 +0000 UTC m=+5745.463033933" Feb 16 14:39:06 crc kubenswrapper[4816]: I0216 14:39:06.941493 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:39:06 crc kubenswrapper[4816]: I0216 14:39:06.941887 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:39:07 crc kubenswrapper[4816]: I0216 14:39:07.111570 4816 generic.go:334] "Generic (PLEG): container finished" podID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerID="c21b0c7e49dfad3a38e053c7e2af0473d7dfe30c0569f7d62d82f5b7342f6f96" exitCode=0 Feb 16 14:39:07 crc kubenswrapper[4816]: I0216 14:39:07.112619 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" event={"ID":"23ea82c9-c475-4eba-abd4-3af555a67d1b","Type":"ContainerDied","Data":"c21b0c7e49dfad3a38e053c7e2af0473d7dfe30c0569f7d62d82f5b7342f6f96"} Feb 16 14:39:09 crc kubenswrapper[4816]: I0216 14:39:09.142506 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" event={"ID":"23ea82c9-c475-4eba-abd4-3af555a67d1b","Type":"ContainerStarted","Data":"9dea1ed72e3c2242512c437d988880780b2d245ef9de47d5bffcd43c40d84393"} Feb 16 14:39:09 crc kubenswrapper[4816]: I0216 
14:39:09.167039 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" podStartSLOduration=1.677261653 podStartE2EDuration="17.1670131s" podCreationTimestamp="2026-02-16 14:38:52 +0000 UTC" firstStartedPulling="2026-02-16 14:38:52.99644999 +0000 UTC m=+5732.323163718" lastFinishedPulling="2026-02-16 14:39:08.486201437 +0000 UTC m=+5747.812915165" observedRunningTime="2026-02-16 14:39:09.160947855 +0000 UTC m=+5748.487661583" watchObservedRunningTime="2026-02-16 14:39:09.1670131 +0000 UTC m=+5748.493726828" Feb 16 14:39:09 crc kubenswrapper[4816]: E0216 14:39:09.931476 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice/crio-2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice\": RecentStats: unable to find data in memory cache]" Feb 16 14:39:20 crc kubenswrapper[4816]: E0216 14:39:20.166268 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6179ceaa_76f9_4804_80b5_27ffb9ee36c7.slice/crio-2a917bc1ee172d13163faf34d83435017e77edaae3f134c14ac88bebcaefba40\": RecentStats: unable to find data in memory cache]" Feb 16 14:39:21 crc kubenswrapper[4816]: I0216 14:39:21.827543 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-2brcl" Feb 16 14:39:29 crc kubenswrapper[4816]: I0216 14:39:29.769565 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-bz5wq"] Feb 16 14:39:29 crc kubenswrapper[4816]: I0216 14:39:29.770304 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" podUID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerName="octavia-amphora-httpd" containerID="cri-o://9dea1ed72e3c2242512c437d988880780b2d245ef9de47d5bffcd43c40d84393" gracePeriod=30 Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.438785 4816 generic.go:334] "Generic (PLEG): container finished" podID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerID="9dea1ed72e3c2242512c437d988880780b2d245ef9de47d5bffcd43c40d84393" exitCode=0 Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.438842 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" event={"ID":"23ea82c9-c475-4eba-abd4-3af555a67d1b","Type":"ContainerDied","Data":"9dea1ed72e3c2242512c437d988880780b2d245ef9de47d5bffcd43c40d84393"} Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.439061 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" event={"ID":"23ea82c9-c475-4eba-abd4-3af555a67d1b","Type":"ContainerDied","Data":"2e2bfabf2297986e5f0e1f593907858f51e52af04df2ee2a1d63720ddd162ec0"} Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.439074 4816 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="2e2bfabf2297986e5f0e1f593907858f51e52af04df2ee2a1d63720ddd162ec0" Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.471624 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.610175 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23ea82c9-c475-4eba-abd4-3af555a67d1b-httpd-config\") pod \"23ea82c9-c475-4eba-abd4-3af555a67d1b\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.610224 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/23ea82c9-c475-4eba-abd4-3af555a67d1b-amphora-image\") pod \"23ea82c9-c475-4eba-abd4-3af555a67d1b\" (UID: \"23ea82c9-c475-4eba-abd4-3af555a67d1b\") " Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.640431 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ea82c9-c475-4eba-abd4-3af555a67d1b-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "23ea82c9-c475-4eba-abd4-3af555a67d1b" (UID: "23ea82c9-c475-4eba-abd4-3af555a67d1b"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.696075 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23ea82c9-c475-4eba-abd4-3af555a67d1b-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "23ea82c9-c475-4eba-abd4-3af555a67d1b" (UID: "23ea82c9-c475-4eba-abd4-3af555a67d1b"). InnerVolumeSpecName "amphora-image". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.712800 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23ea82c9-c475-4eba-abd4-3af555a67d1b-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:39:30 crc kubenswrapper[4816]: I0216 14:39:30.712856 4816 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/23ea82c9-c475-4eba-abd4-3af555a67d1b-amphora-image\") on node \"crc\" DevicePath \"\"" Feb 16 14:39:31 crc kubenswrapper[4816]: I0216 14:39:31.453561 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-bz5wq" Feb 16 14:39:31 crc kubenswrapper[4816]: I0216 14:39:31.485281 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-bz5wq"] Feb 16 14:39:31 crc kubenswrapper[4816]: I0216 14:39:31.493330 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-bz5wq"] Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.800767 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-g6rrq"] Feb 16 14:39:32 crc kubenswrapper[4816]: E0216 14:39:32.802558 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerName="octavia-amphora-httpd" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.802693 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerName="octavia-amphora-httpd" Feb 16 14:39:32 crc kubenswrapper[4816]: E0216 14:39:32.802806 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" containerName="octavia-db-sync" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.802896 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" containerName="octavia-db-sync" Feb 16 14:39:32 crc kubenswrapper[4816]: E0216 14:39:32.802999 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" containerName="init" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.803079 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" containerName="init" Feb 16 14:39:32 crc kubenswrapper[4816]: E0216 14:39:32.803152 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerName="init" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.803221 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerName="init" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.803580 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="23ea82c9-c475-4eba-abd4-3af555a67d1b" containerName="octavia-amphora-httpd" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.803878 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" containerName="octavia-db-sync" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.805286 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.811483 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.824362 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-g6rrq"] Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.862945 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cf336b82-22b2-4f4b-a120-d0141487192f-amphora-image\") pod \"octavia-image-upload-59f8cff499-g6rrq\" (UID: \"cf336b82-22b2-4f4b-a120-d0141487192f\") " pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.863145 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf336b82-22b2-4f4b-a120-d0141487192f-httpd-config\") pod \"octavia-image-upload-59f8cff499-g6rrq\" (UID: \"cf336b82-22b2-4f4b-a120-d0141487192f\") " pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.964557 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf336b82-22b2-4f4b-a120-d0141487192f-httpd-config\") pod \"octavia-image-upload-59f8cff499-g6rrq\" (UID: \"cf336b82-22b2-4f4b-a120-d0141487192f\") " pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.964727 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cf336b82-22b2-4f4b-a120-d0141487192f-amphora-image\") pod \"octavia-image-upload-59f8cff499-g6rrq\" (UID: \"cf336b82-22b2-4f4b-a120-d0141487192f\") " pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.965360 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cf336b82-22b2-4f4b-a120-d0141487192f-amphora-image\") pod \"octavia-image-upload-59f8cff499-g6rrq\" (UID: \"cf336b82-22b2-4f4b-a120-d0141487192f\") " pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:32 crc kubenswrapper[4816]: I0216 14:39:32.971235 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf336b82-22b2-4f4b-a120-d0141487192f-httpd-config\") pod \"octavia-image-upload-59f8cff499-g6rrq\" (UID: \"cf336b82-22b2-4f4b-a120-d0141487192f\") " pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:33 crc kubenswrapper[4816]: I0216 14:39:33.124961 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-g6rrq" Feb 16 14:39:33 crc kubenswrapper[4816]: I0216 14:39:33.410520 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23ea82c9-c475-4eba-abd4-3af555a67d1b" path="/var/lib/kubelet/pods/23ea82c9-c475-4eba-abd4-3af555a67d1b/volumes" Feb 16 14:39:33 crc kubenswrapper[4816]: I0216 14:39:33.672037 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-g6rrq"] Feb 16 14:39:34 crc kubenswrapper[4816]: I0216 14:39:34.484434 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-g6rrq" event={"ID":"cf336b82-22b2-4f4b-a120-d0141487192f","Type":"ContainerStarted","Data":"eb5af750d42f64fb7cfe014d8c9187c876bced91c1cc3546e2a7ad4d26aaa4bc"} Feb 16 14:39:34 crc kubenswrapper[4816]: I0216 14:39:34.484792 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-g6rrq" event={"ID":"cf336b82-22b2-4f4b-a120-d0141487192f","Type":"ContainerStarted","Data":"5219dfdbe547f37ec49c46d99031a2e57f212a20dde4a0cd3f01c32771f10af9"} Feb 16 14:39:35 crc kubenswrapper[4816]: I0216 14:39:35.494643 4816 generic.go:334] "Generic (PLEG): container finished" podID="cf336b82-22b2-4f4b-a120-d0141487192f" containerID="eb5af750d42f64fb7cfe014d8c9187c876bced91c1cc3546e2a7ad4d26aaa4bc" exitCode=0 Feb 16 14:39:35 crc kubenswrapper[4816]: I0216 14:39:35.494807 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-g6rrq" event={"ID":"cf336b82-22b2-4f4b-a120-d0141487192f","Type":"ContainerDied","Data":"eb5af750d42f64fb7cfe014d8c9187c876bced91c1cc3546e2a7ad4d26aaa4bc"} Feb 16 14:39:36 crc kubenswrapper[4816]: I0216 14:39:36.941159 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:39:36 crc kubenswrapper[4816]: I0216 14:39:36.941547 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:39:37 crc kubenswrapper[4816]: I0216 14:39:37.518930 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-g6rrq" event={"ID":"cf336b82-22b2-4f4b-a120-d0141487192f","Type":"ContainerStarted","Data":"c323ddf0fb52b46338e2b4c2444d833cea88240320b8a0c276db5f62796ae111"} Feb 16 14:39:37 crc kubenswrapper[4816]: I0216 14:39:37.545049 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-g6rrq" podStartSLOduration=2.8315470339999997 podStartE2EDuration="5.545033851s" podCreationTimestamp="2026-02-16 14:39:32 +0000 UTC" firstStartedPulling="2026-02-16 14:39:33.674231269 +0000 UTC m=+5773.000944997" lastFinishedPulling="2026-02-16 14:39:36.387718086 +0000 UTC m=+5775.714431814" observedRunningTime="2026-02-16 14:39:37.54386497 +0000 UTC m=+5776.870578698" watchObservedRunningTime="2026-02-16 14:39:37.545033851 +0000 UTC m=+5776.871747579" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.083665 4816 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/octavia-healthmanager-8vhbw"] Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.086253 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.089181 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.089980 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.089998 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.097285 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-8vhbw"] Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.213107 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/279f09b2-16a7-4be2-a83b-dba7b3794fd3-hm-ports\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.213220 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-config-data\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.213373 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/279f09b2-16a7-4be2-a83b-dba7b3794fd3-config-data-merged\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.213500 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-combined-ca-bundle\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.213539 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-amphora-certs\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.213847 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-scripts\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.315854 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: 
\"kubernetes.io/empty-dir/279f09b2-16a7-4be2-a83b-dba7b3794fd3-config-data-merged\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.315927 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-combined-ca-bundle\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.315965 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-amphora-certs\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.316074 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-scripts\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.316123 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/279f09b2-16a7-4be2-a83b-dba7b3794fd3-hm-ports\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.316194 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-config-data\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.317308 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/279f09b2-16a7-4be2-a83b-dba7b3794fd3-config-data-merged\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.317322 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/279f09b2-16a7-4be2-a83b-dba7b3794fd3-hm-ports\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.322147 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-combined-ca-bundle\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.322212 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-amphora-certs\") pod \"octavia-healthmanager-8vhbw\" (UID: 
\"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.325323 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-scripts\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.330049 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/279f09b2-16a7-4be2-a83b-dba7b3794fd3-config-data\") pod \"octavia-healthmanager-8vhbw\" (UID: \"279f09b2-16a7-4be2-a83b-dba7b3794fd3\") " pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:56 crc kubenswrapper[4816]: I0216 14:39:56.409631 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:39:57 crc kubenswrapper[4816]: W0216 14:39:57.054914 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod279f09b2_16a7_4be2_a83b_dba7b3794fd3.slice/crio-1e9a1cddd4b502df6ff2f6ab50ac280c356309b69be1261c196054ffdd8127ef WatchSource:0}: Error finding container 1e9a1cddd4b502df6ff2f6ab50ac280c356309b69be1261c196054ffdd8127ef: Status 404 returned error can't find the container with id 1e9a1cddd4b502df6ff2f6ab50ac280c356309b69be1261c196054ffdd8127ef Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.066889 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-8vhbw"] Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.775169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-8vhbw" event={"ID":"279f09b2-16a7-4be2-a83b-dba7b3794fd3","Type":"ContainerStarted","Data":"5f1be6193f5e53e1590537ab1d2a328ff62434ea95a86bbaf8cedfd389d7555c"} Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.775502 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-8vhbw" event={"ID":"279f09b2-16a7-4be2-a83b-dba7b3794fd3","Type":"ContainerStarted","Data":"1e9a1cddd4b502df6ff2f6ab50ac280c356309b69be1261c196054ffdd8127ef"} Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.885474 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-4xd2x"] Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.887340 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.890318 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.890332 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Feb 16 14:39:57 crc kubenswrapper[4816]: I0216 14:39:57.901844 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-4xd2x"] Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:57.998822 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4e86a794-696e-45be-b7fc-5a9df7c3bab5-config-data-merged\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.000178 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-combined-ca-bundle\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.000260 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-scripts\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.000319 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-config-data\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.000417 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4e86a794-696e-45be-b7fc-5a9df7c3bab5-hm-ports\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.000469 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-amphora-certs\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.103118 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4e86a794-696e-45be-b7fc-5a9df7c3bab5-config-data-merged\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.103187 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-combined-ca-bundle\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.103231 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-scripts\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.103270 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-config-data\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.103328 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4e86a794-696e-45be-b7fc-5a9df7c3bab5-hm-ports\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.103370 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-amphora-certs\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.103812 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4e86a794-696e-45be-b7fc-5a9df7c3bab5-config-data-merged\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.104765 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4e86a794-696e-45be-b7fc-5a9df7c3bab5-hm-ports\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.109098 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-combined-ca-bundle\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.109503 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-amphora-certs\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.109864 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-config-data\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " 
pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.111023 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e86a794-696e-45be-b7fc-5a9df7c3bab5-scripts\") pod \"octavia-housekeeping-4xd2x\" (UID: \"4e86a794-696e-45be-b7fc-5a9df7c3bab5\") " pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.204601 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:39:58 crc kubenswrapper[4816]: I0216 14:39:58.871486 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-4xd2x"] Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.084498 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-w4cm6"] Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.087183 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.090152 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.090323 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.127379 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-w4cm6"] Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.271045 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9982c371-5ece-4660-bc85-25e726887e29-hm-ports\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.271117 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-config-data\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.271253 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9982c371-5ece-4660-bc85-25e726887e29-config-data-merged\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.271371 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-scripts\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.271408 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-amphora-certs\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 
14:39:59.271446 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-combined-ca-bundle\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.373648 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9982c371-5ece-4660-bc85-25e726887e29-config-data-merged\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.373808 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-scripts\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.373843 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-amphora-certs\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.373867 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-combined-ca-bundle\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.373959 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9982c371-5ece-4660-bc85-25e726887e29-hm-ports\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.374003 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-config-data\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.374122 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/9982c371-5ece-4660-bc85-25e726887e29-config-data-merged\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.375372 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/9982c371-5ece-4660-bc85-25e726887e29-hm-ports\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.380598 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-combined-ca-bundle\") pod 
\"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.381007 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-amphora-certs\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.381822 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-scripts\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.391275 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9982c371-5ece-4660-bc85-25e726887e29-config-data\") pod \"octavia-worker-w4cm6\" (UID: \"9982c371-5ece-4660-bc85-25e726887e29\") " pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.436506 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-w4cm6" Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.808014 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-4xd2x" event={"ID":"4e86a794-696e-45be-b7fc-5a9df7c3bab5","Type":"ContainerStarted","Data":"904deff30d6aadb8fa29415214cd304fcd8fffa68141dfcef27d5adcc3a2c841"} Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.809511 4816 generic.go:334] "Generic (PLEG): container finished" podID="279f09b2-16a7-4be2-a83b-dba7b3794fd3" containerID="5f1be6193f5e53e1590537ab1d2a328ff62434ea95a86bbaf8cedfd389d7555c" exitCode=0 Feb 16 14:39:59 crc kubenswrapper[4816]: I0216 14:39:59.809620 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-8vhbw" event={"ID":"279f09b2-16a7-4be2-a83b-dba7b3794fd3","Type":"ContainerDied","Data":"5f1be6193f5e53e1590537ab1d2a328ff62434ea95a86bbaf8cedfd389d7555c"} Feb 16 14:40:00 crc kubenswrapper[4816]: I0216 14:40:00.051114 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-w4cm6"] Feb 16 14:40:00 crc kubenswrapper[4816]: I0216 14:40:00.825999 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-4xd2x" event={"ID":"4e86a794-696e-45be-b7fc-5a9df7c3bab5","Type":"ContainerStarted","Data":"fe191d99754ecb1c6b006bfbe5d5b1a59f4761d997d0411990c7380694aa9dbc"} Feb 16 14:40:00 crc kubenswrapper[4816]: I0216 14:40:00.830582 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-8vhbw" event={"ID":"279f09b2-16a7-4be2-a83b-dba7b3794fd3","Type":"ContainerStarted","Data":"91c1b3b0a20d67e776c55d5a7a2a6f02c960b142a124a37732a7b990f341892d"} Feb 16 14:40:00 crc kubenswrapper[4816]: I0216 14:40:00.830834 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:40:00 crc kubenswrapper[4816]: I0216 14:40:00.832117 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-w4cm6" event={"ID":"9982c371-5ece-4660-bc85-25e726887e29","Type":"ContainerStarted","Data":"7aad9fd7873348a5fee69d8182502bcc8814dd608b2bb413473f7b54c0b8ac2d"} Feb 16 14:40:00 crc kubenswrapper[4816]: I0216 
14:40:00.873625 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-8vhbw" podStartSLOduration=4.873598101 podStartE2EDuration="4.873598101s" podCreationTimestamp="2026-02-16 14:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:40:00.869222001 +0000 UTC m=+5800.195935729" watchObservedRunningTime="2026-02-16 14:40:00.873598101 +0000 UTC m=+5800.200311829" Feb 16 14:40:01 crc kubenswrapper[4816]: I0216 14:40:01.268442 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-8vhbw"] Feb 16 14:40:01 crc kubenswrapper[4816]: I0216 14:40:01.844480 4816 generic.go:334] "Generic (PLEG): container finished" podID="4e86a794-696e-45be-b7fc-5a9df7c3bab5" containerID="fe191d99754ecb1c6b006bfbe5d5b1a59f4761d997d0411990c7380694aa9dbc" exitCode=0 Feb 16 14:40:01 crc kubenswrapper[4816]: I0216 14:40:01.844577 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-4xd2x" event={"ID":"4e86a794-696e-45be-b7fc-5a9df7c3bab5","Type":"ContainerDied","Data":"fe191d99754ecb1c6b006bfbe5d5b1a59f4761d997d0411990c7380694aa9dbc"} Feb 16 14:40:02 crc kubenswrapper[4816]: I0216 14:40:02.857337 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-w4cm6" event={"ID":"9982c371-5ece-4660-bc85-25e726887e29","Type":"ContainerStarted","Data":"2950ba3d8370eb0ae3c821c6be9d1a8926c84d2b2a7843a39680192ac15ae4f6"} Feb 16 14:40:02 crc kubenswrapper[4816]: I0216 14:40:02.859806 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-4xd2x" event={"ID":"4e86a794-696e-45be-b7fc-5a9df7c3bab5","Type":"ContainerStarted","Data":"3cb5c98da64c5abbac07fe8f83769cafe5219c8f4de141e2c8c793e6b5f99190"} Feb 16 14:40:02 crc kubenswrapper[4816]: I0216 14:40:02.860479 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:40:02 crc kubenswrapper[4816]: I0216 14:40:02.899782 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-4xd2x" podStartSLOduration=4.615870961 podStartE2EDuration="5.899735386s" podCreationTimestamp="2026-02-16 14:39:57 +0000 UTC" firstStartedPulling="2026-02-16 14:39:58.882258215 +0000 UTC m=+5798.208971943" lastFinishedPulling="2026-02-16 14:40:00.16612264 +0000 UTC m=+5799.492836368" observedRunningTime="2026-02-16 14:40:02.894621426 +0000 UTC m=+5802.221335164" watchObservedRunningTime="2026-02-16 14:40:02.899735386 +0000 UTC m=+5802.226449114" Feb 16 14:40:03 crc kubenswrapper[4816]: I0216 14:40:03.870625 4816 generic.go:334] "Generic (PLEG): container finished" podID="9982c371-5ece-4660-bc85-25e726887e29" containerID="2950ba3d8370eb0ae3c821c6be9d1a8926c84d2b2a7843a39680192ac15ae4f6" exitCode=0 Feb 16 14:40:03 crc kubenswrapper[4816]: I0216 14:40:03.870778 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-w4cm6" event={"ID":"9982c371-5ece-4660-bc85-25e726887e29","Type":"ContainerDied","Data":"2950ba3d8370eb0ae3c821c6be9d1a8926c84d2b2a7843a39680192ac15ae4f6"} Feb 16 14:40:04 crc kubenswrapper[4816]: I0216 14:40:04.881728 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-w4cm6" event={"ID":"9982c371-5ece-4660-bc85-25e726887e29","Type":"ContainerStarted","Data":"8c886de4706013c31786a6d3d28e5ad442a60bb537873e1000e859363807dcc7"} Feb 16 14:40:04 
crc kubenswrapper[4816]: I0216 14:40:04.882220 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-w4cm6" Feb 16 14:40:04 crc kubenswrapper[4816]: I0216 14:40:04.904298 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-w4cm6" podStartSLOduration=4.614096583 podStartE2EDuration="5.904273351s" podCreationTimestamp="2026-02-16 14:39:59 +0000 UTC" firstStartedPulling="2026-02-16 14:40:00.113508146 +0000 UTC m=+5799.440221874" lastFinishedPulling="2026-02-16 14:40:01.403684914 +0000 UTC m=+5800.730398642" observedRunningTime="2026-02-16 14:40:04.901410563 +0000 UTC m=+5804.228124301" watchObservedRunningTime="2026-02-16 14:40:04.904273351 +0000 UTC m=+5804.230987099" Feb 16 14:40:06 crc kubenswrapper[4816]: I0216 14:40:06.940838 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:40:06 crc kubenswrapper[4816]: I0216 14:40:06.942393 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:40:06 crc kubenswrapper[4816]: I0216 14:40:06.942519 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:40:06 crc kubenswrapper[4816]: I0216 14:40:06.943395 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cd2cf9fe35bf79ffe24a1838af8ddd11554eb97681bc8bd3d107b775cf4e6572"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:40:06 crc kubenswrapper[4816]: I0216 14:40:06.943566 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://cd2cf9fe35bf79ffe24a1838af8ddd11554eb97681bc8bd3d107b775cf4e6572" gracePeriod=600 Feb 16 14:40:07 crc kubenswrapper[4816]: I0216 14:40:07.912097 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="cd2cf9fe35bf79ffe24a1838af8ddd11554eb97681bc8bd3d107b775cf4e6572" exitCode=0 Feb 16 14:40:07 crc kubenswrapper[4816]: I0216 14:40:07.912169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"cd2cf9fe35bf79ffe24a1838af8ddd11554eb97681bc8bd3d107b775cf4e6572"} Feb 16 14:40:07 crc kubenswrapper[4816]: I0216 14:40:07.912715 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2"} Feb 16 14:40:07 crc kubenswrapper[4816]: I0216 14:40:07.912768 4816 
scope.go:117] "RemoveContainer" containerID="3dcb96e2f58bc4f4840ac9b281b8c48f89d8fdd68fb2364baccea2c99dda06a9" Feb 16 14:40:11 crc kubenswrapper[4816]: I0216 14:40:11.437717 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-8vhbw" Feb 16 14:40:13 crc kubenswrapper[4816]: I0216 14:40:13.233924 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-4xd2x" Feb 16 14:40:14 crc kubenswrapper[4816]: I0216 14:40:14.465205 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-w4cm6" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.323593 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6b9f4ffc55-cnndq"] Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.326327 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.331250 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.331678 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.339077 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.340479 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-x46wd" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.351226 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6b9f4ffc55-cnndq"] Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.381503 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794feb25-7ead-47e2-a28e-3f3b6e2f209d-logs\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.381568 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/794feb25-7ead-47e2-a28e-3f3b6e2f209d-horizon-secret-key\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.381608 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-scripts\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.381631 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vspzv\" (UniqueName: \"kubernetes.io/projected/794feb25-7ead-47e2-a28e-3f3b6e2f209d-kube-api-access-vspzv\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.381704 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-config-data\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.401512 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.401746 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-log" containerID="cri-o://43413c47567c8111914ba4a1037a4cc0f5821d9898526a33e2c41c621a102d49" gracePeriod=30 Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.402235 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-httpd" containerID="cri-o://45901d657d087e3bd7cf2781c50be19b30636d3b0ad263acb29ec6c8e41ea381" gracePeriod=30 Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.438973 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-65987c9657-tmn5s"] Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.440625 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.470081 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-65987c9657-tmn5s"] Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.485476 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/794feb25-7ead-47e2-a28e-3f3b6e2f209d-horizon-secret-key\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.485560 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-scripts\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.485593 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vspzv\" (UniqueName: \"kubernetes.io/projected/794feb25-7ead-47e2-a28e-3f3b6e2f209d-kube-api-access-vspzv\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.485633 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-config-data\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.485834 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794feb25-7ead-47e2-a28e-3f3b6e2f209d-logs\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.486805 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-scripts\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq"
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.487433 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-config-data\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq"
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.492845 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/794feb25-7ead-47e2-a28e-3f3b6e2f209d-horizon-secret-key\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq"
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.502702 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.503013 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-log" containerID="cri-o://5962c18ea0aa4a789e7d1f8743586abb8a592c8960bb2701414b7b3d45c0c5ac" gracePeriod=30
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.503151 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-httpd" containerID="cri-o://89ea358324e95e1441e7ee837f0617ffa34e0880a14f0e89e432e8f7c28f9421" gracePeriod=30
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.529462 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vspzv\" (UniqueName: \"kubernetes.io/projected/794feb25-7ead-47e2-a28e-3f3b6e2f209d-kube-api-access-vspzv\") pod \"horizon-6b9f4ffc55-cnndq\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " pod="openstack/horizon-6b9f4ffc55-cnndq"
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.587135 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-scripts\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s"
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.587324 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-logs\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s"
Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.587359 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-config-data\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.587419 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-horizon-secret-key\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.587486 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4bx8\" (UniqueName: \"kubernetes.io/projected/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-kube-api-access-k4bx8\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.661585 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.689236 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-horizon-secret-key\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.689312 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4bx8\" (UniqueName: \"kubernetes.io/projected/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-kube-api-access-k4bx8\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.689391 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-scripts\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.689468 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-logs\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.689485 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-config-data\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.689965 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-logs\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 
crc kubenswrapper[4816]: I0216 14:40:20.691129 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-config-data\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.694091 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-horizon-secret-key\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.694157 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-scripts\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.709111 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4bx8\" (UniqueName: \"kubernetes.io/projected/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-kube-api-access-k4bx8\") pod \"horizon-65987c9657-tmn5s\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:20 crc kubenswrapper[4816]: I0216 14:40:20.766353 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.053615 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-65987c9657-tmn5s"] Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.057889 4816 generic.go:334] "Generic (PLEG): container finished" podID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerID="5962c18ea0aa4a789e7d1f8743586abb8a592c8960bb2701414b7b3d45c0c5ac" exitCode=143 Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.058040 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4f499d0d-a2bc-4c0d-be4c-344346319421","Type":"ContainerDied","Data":"5962c18ea0aa4a789e7d1f8743586abb8a592c8960bb2701414b7b3d45c0c5ac"} Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.063326 4816 generic.go:334] "Generic (PLEG): container finished" podID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerID="43413c47567c8111914ba4a1037a4cc0f5821d9898526a33e2c41c621a102d49" exitCode=143 Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.063389 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36d355ac-ed74-4343-9ddd-e36e5166df83","Type":"ContainerDied","Data":"43413c47567c8111914ba4a1037a4cc0f5821d9898526a33e2c41c621a102d49"} Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.085704 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8bdf5995c-wjvtc"] Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.087436 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.114507 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8bdf5995c-wjvtc"] Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.164505 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6b9f4ffc55-cnndq"] Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.199596 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-config-data\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.199704 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f775b181-bb2d-408d-81ac-2bdc2046184c-logs\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.199772 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f775b181-bb2d-408d-81ac-2bdc2046184c-horizon-secret-key\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.199844 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-scripts\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.199870 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkmnj\" (UniqueName: \"kubernetes.io/projected/f775b181-bb2d-408d-81ac-2bdc2046184c-kube-api-access-pkmnj\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.288897 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-65987c9657-tmn5s"] Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.301569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-config-data\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.301681 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f775b181-bb2d-408d-81ac-2bdc2046184c-logs\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.301728 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f775b181-bb2d-408d-81ac-2bdc2046184c-horizon-secret-key\") pod \"horizon-8bdf5995c-wjvtc\" (UID: 
\"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.301800 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-scripts\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.301829 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkmnj\" (UniqueName: \"kubernetes.io/projected/f775b181-bb2d-408d-81ac-2bdc2046184c-kube-api-access-pkmnj\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.302399 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f775b181-bb2d-408d-81ac-2bdc2046184c-logs\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.303097 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-scripts\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.303321 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-config-data\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.310056 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f775b181-bb2d-408d-81ac-2bdc2046184c-horizon-secret-key\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.317934 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkmnj\" (UniqueName: \"kubernetes.io/projected/f775b181-bb2d-408d-81ac-2bdc2046184c-kube-api-access-pkmnj\") pod \"horizon-8bdf5995c-wjvtc\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.429258 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:21 crc kubenswrapper[4816]: I0216 14:40:21.901176 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8bdf5995c-wjvtc"] Feb 16 14:40:21 crc kubenswrapper[4816]: W0216 14:40:21.912340 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf775b181_bb2d_408d_81ac_2bdc2046184c.slice/crio-f137b95c7f16a315265ca3df23cef9f9bd7add3c4324cdc97c9fb201daab8742 WatchSource:0}: Error finding container f137b95c7f16a315265ca3df23cef9f9bd7add3c4324cdc97c9fb201daab8742: Status 404 returned error can't find the container with id f137b95c7f16a315265ca3df23cef9f9bd7add3c4324cdc97c9fb201daab8742 Feb 16 14:40:22 crc kubenswrapper[4816]: I0216 14:40:22.083434 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8bdf5995c-wjvtc" event={"ID":"f775b181-bb2d-408d-81ac-2bdc2046184c","Type":"ContainerStarted","Data":"f137b95c7f16a315265ca3df23cef9f9bd7add3c4324cdc97c9fb201daab8742"} Feb 16 14:40:22 crc kubenswrapper[4816]: I0216 14:40:22.085213 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65987c9657-tmn5s" event={"ID":"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671","Type":"ContainerStarted","Data":"46a735b28ec04225f6deb974721182605914923d606d57bcaa3b856b0985a7d2"} Feb 16 14:40:22 crc kubenswrapper[4816]: I0216 14:40:22.087517 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f4ffc55-cnndq" event={"ID":"794feb25-7ead-47e2-a28e-3f3b6e2f209d","Type":"ContainerStarted","Data":"7c06b6eb84f2a11008a8bbc99ca59c0acddbed829c45b25e952d4f0d3ebe2805"} Feb 16 14:40:24 crc kubenswrapper[4816]: I0216 14:40:24.112419 4816 generic.go:334] "Generic (PLEG): container finished" podID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerID="89ea358324e95e1441e7ee837f0617ffa34e0880a14f0e89e432e8f7c28f9421" exitCode=0 Feb 16 14:40:24 crc kubenswrapper[4816]: I0216 14:40:24.112493 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4f499d0d-a2bc-4c0d-be4c-344346319421","Type":"ContainerDied","Data":"89ea358324e95e1441e7ee837f0617ffa34e0880a14f0e89e432e8f7c28f9421"} Feb 16 14:40:24 crc kubenswrapper[4816]: I0216 14:40:24.115848 4816 generic.go:334] "Generic (PLEG): container finished" podID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerID="45901d657d087e3bd7cf2781c50be19b30636d3b0ad263acb29ec6c8e41ea381" exitCode=0 Feb 16 14:40:24 crc kubenswrapper[4816]: I0216 14:40:24.115904 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36d355ac-ed74-4343-9ddd-e36e5166df83","Type":"ContainerDied","Data":"45901d657d087e3bd7cf2781c50be19b30636d3b0ad263acb29ec6c8e41ea381"} Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.542133 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.612846 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-logs\") pod \"4f499d0d-a2bc-4c0d-be4c-344346319421\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.612929 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-combined-ca-bundle\") pod \"4f499d0d-a2bc-4c0d-be4c-344346319421\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.613038 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-config-data\") pod \"4f499d0d-a2bc-4c0d-be4c-344346319421\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.613157 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwlbd\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-kube-api-access-qwlbd\") pod \"4f499d0d-a2bc-4c0d-be4c-344346319421\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.613276 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-httpd-run\") pod \"4f499d0d-a2bc-4c0d-be4c-344346319421\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.613301 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-ceph\") pod \"4f499d0d-a2bc-4c0d-be4c-344346319421\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.613359 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-scripts\") pod \"4f499d0d-a2bc-4c0d-be4c-344346319421\" (UID: \"4f499d0d-a2bc-4c0d-be4c-344346319421\") " Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.613799 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4f499d0d-a2bc-4c0d-be4c-344346319421" (UID: "4f499d0d-a2bc-4c0d-be4c-344346319421"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.613986 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-logs" (OuterVolumeSpecName: "logs") pod "4f499d0d-a2bc-4c0d-be4c-344346319421" (UID: "4f499d0d-a2bc-4c0d-be4c-344346319421"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.614372 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.614387 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f499d0d-a2bc-4c0d-be4c-344346319421-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.619020 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-scripts" (OuterVolumeSpecName: "scripts") pod "4f499d0d-a2bc-4c0d-be4c-344346319421" (UID: "4f499d0d-a2bc-4c0d-be4c-344346319421"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.619212 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-ceph" (OuterVolumeSpecName: "ceph") pod "4f499d0d-a2bc-4c0d-be4c-344346319421" (UID: "4f499d0d-a2bc-4c0d-be4c-344346319421"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.619488 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-kube-api-access-qwlbd" (OuterVolumeSpecName: "kube-api-access-qwlbd") pod "4f499d0d-a2bc-4c0d-be4c-344346319421" (UID: "4f499d0d-a2bc-4c0d-be4c-344346319421"). InnerVolumeSpecName "kube-api-access-qwlbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.661383 4816 scope.go:117] "RemoveContainer" containerID="5962c18ea0aa4a789e7d1f8743586abb8a592c8960bb2701414b7b3d45c0c5ac" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.662011 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f499d0d-a2bc-4c0d-be4c-344346319421" (UID: "4f499d0d-a2bc-4c0d-be4c-344346319421"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.715314 4816 scope.go:117] "RemoveContainer" containerID="89ea358324e95e1441e7ee837f0617ffa34e0880a14f0e89e432e8f7c28f9421" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.718423 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-config-data" (OuterVolumeSpecName: "config-data") pod "4f499d0d-a2bc-4c0d-be4c-344346319421" (UID: "4f499d0d-a2bc-4c0d-be4c-344346319421"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.719623 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.719662 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.719674 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwlbd\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-kube-api-access-qwlbd\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.719684 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f499d0d-a2bc-4c0d-be4c-344346319421-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.719693 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f499d0d-a2bc-4c0d-be4c-344346319421-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.765666 4816 scope.go:117] "RemoveContainer" containerID="5a9a4a7a46b1b068cd0b7663ef5953e5b9cf57e616eaf2c86988e4a4cd0ca156" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.799175 4816 scope.go:117] "RemoveContainer" containerID="dc81d85d46dd9e7d3069a295e2894ca0b7e7a61e9fb79bde5542b707cb635cf2" Feb 16 14:40:28 crc kubenswrapper[4816]: I0216 14:40:28.818929 4816 scope.go:117] "RemoveContainer" containerID="d558e015f955b773d0287fe2954b11fb127a4bf8d6b29f1b138522652419197b" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.179969 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8bdf5995c-wjvtc" event={"ID":"f775b181-bb2d-408d-81ac-2bdc2046184c","Type":"ContainerStarted","Data":"51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde"} Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.180105 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8bdf5995c-wjvtc" event={"ID":"f775b181-bb2d-408d-81ac-2bdc2046184c","Type":"ContainerStarted","Data":"19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412"} Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.181908 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65987c9657-tmn5s" event={"ID":"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671","Type":"ContainerStarted","Data":"9ddf753d75606d9d7f73cfc80af627b5bb57d147537106dfcf13e0f7b9ea0861"} Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.181963 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65987c9657-tmn5s" event={"ID":"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671","Type":"ContainerStarted","Data":"0de02733857398068aa3c24fa496511019506eedf3b9f229d6bf0d8d6619eff0"} Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.181983 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4f499d0d-a2bc-4c0d-be4c-344346319421","Type":"ContainerDied","Data":"7868bb6a114b83d90f21bbefc139c568f490d6a786c0f57451eccfad6f052a8c"} Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.182136 4816 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-65987c9657-tmn5s" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon-log" containerID="cri-o://0de02733857398068aa3c24fa496511019506eedf3b9f229d6bf0d8d6619eff0" gracePeriod=30
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.182442 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-65987c9657-tmn5s" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon" containerID="cri-o://9ddf753d75606d9d7f73cfc80af627b5bb57d147537106dfcf13e0f7b9ea0861" gracePeriod=30
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.188118 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.189007 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f4ffc55-cnndq" event={"ID":"794feb25-7ead-47e2-a28e-3f3b6e2f209d","Type":"ContainerStarted","Data":"a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46"}
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.189042 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f4ffc55-cnndq" event={"ID":"794feb25-7ead-47e2-a28e-3f3b6e2f209d","Type":"ContainerStarted","Data":"84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b"}
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.208062 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-8bdf5995c-wjvtc" podStartSLOduration=1.874641715 podStartE2EDuration="8.20804094s" podCreationTimestamp="2026-02-16 14:40:21 +0000 UTC" firstStartedPulling="2026-02-16 14:40:21.916225363 +0000 UTC m=+5821.242939091" lastFinishedPulling="2026-02-16 14:40:28.249624598 +0000 UTC m=+5827.576338316" observedRunningTime="2026-02-16 14:40:29.200989808 +0000 UTC m=+5828.527703546" watchObservedRunningTime="2026-02-16 14:40:29.20804094 +0000 UTC m=+5828.534754678"
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.243146 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6b9f4ffc55-cnndq" podStartSLOduration=2.18933989 podStartE2EDuration="9.243123048s" podCreationTimestamp="2026-02-16 14:40:20 +0000 UTC" firstStartedPulling="2026-02-16 14:40:21.166797469 +0000 UTC m=+5820.493511197" lastFinishedPulling="2026-02-16 14:40:28.220580627 +0000 UTC m=+5827.547294355" observedRunningTime="2026-02-16 14:40:29.2351519 +0000 UTC m=+5828.561865638" watchObservedRunningTime="2026-02-16 14:40:29.243123048 +0000 UTC m=+5828.569836776"
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.295135 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-65987c9657-tmn5s" podStartSLOduration=2.355542342 podStartE2EDuration="9.295115755s" podCreationTimestamp="2026-02-16 14:40:20 +0000 UTC" firstStartedPulling="2026-02-16 14:40:21.28710056 +0000 UTC m=+5820.613814288" lastFinishedPulling="2026-02-16 14:40:28.226673973 +0000 UTC m=+5827.553387701" observedRunningTime="2026-02-16 14:40:29.280578578 +0000 UTC m=+5828.607292316" watchObservedRunningTime="2026-02-16 14:40:29.295115755 +0000 UTC m=+5828.621829483"
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.314364 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.327607 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
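The three pod_startup_latency_tracker entries above fit one relation: podStartSLOduration equals podStartE2EDuration minus the image-pull window (lastFinishedPulling - firstStartedPulling), consistent with the pod startup SLI excluding image pulls. A quick check against the first entry, using the monotonic m=+... offsets copied from the log (an illustration, not kubelet code):

    package main

    import "fmt"

    func main() {
        // m=+... offsets for horizon-8bdf5995c-wjvtc, in seconds.
        firstStartedPulling := 5821.242939091
        lastFinishedPulling := 5827.576338316
        podStartE2E := 8.20804094

        pullWindow := lastFinishedPulling - firstStartedPulling // 6.333399225s pulling images
        fmt.Printf("podStartSLOduration = %.9fs\n", podStartE2E-pullWindow)
        // Prints 1.874641715s, matching podStartSLOduration in the entry above.
    }

The same subtraction reproduces 2.18933989s for horizon-6b9f4ffc55-cnndq and 2.355542342s for horizon-65987c9657-tmn5s.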
"SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.343051 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:40:29 crc kubenswrapper[4816]: E0216 14:40:29.343515 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-httpd" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.343540 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-httpd" Feb 16 14:40:29 crc kubenswrapper[4816]: E0216 14:40:29.343568 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-log" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.343574 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-log" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.343792 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-httpd" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.343812 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" containerName="glance-log" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.344862 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.350170 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.354563 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.414859 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f499d0d-a2bc-4c0d-be4c-344346319421" path="/var/lib/kubelet/pods/4f499d0d-a2bc-4c0d-be4c-344346319421/volumes" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.430103 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8dc52345-93e6-4c65-9617-f4dc9bdd5871-ceph\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.430170 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-scripts\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.430237 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.430414 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-config-data\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.430449 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8dc52345-93e6-4c65-9617-f4dc9bdd5871-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.430512 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jhsl\" (UniqueName: \"kubernetes.io/projected/8dc52345-93e6-4c65-9617-f4dc9bdd5871-kube-api-access-5jhsl\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.430577 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dc52345-93e6-4c65-9617-f4dc9bdd5871-logs\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.531770 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8dc52345-93e6-4c65-9617-f4dc9bdd5871-ceph\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.531839 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-scripts\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.531889 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.531959 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-config-data\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.531975 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8dc52345-93e6-4c65-9617-f4dc9bdd5871-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.532006 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jhsl\" (UniqueName: 
\"kubernetes.io/projected/8dc52345-93e6-4c65-9617-f4dc9bdd5871-kube-api-access-5jhsl\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.532050 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dc52345-93e6-4c65-9617-f4dc9bdd5871-logs\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.532861 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dc52345-93e6-4c65-9617-f4dc9bdd5871-logs\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.533497 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8dc52345-93e6-4c65-9617-f4dc9bdd5871-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.541063 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-scripts\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.542964 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.543541 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8dc52345-93e6-4c65-9617-f4dc9bdd5871-ceph\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.544032 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dc52345-93e6-4c65-9617-f4dc9bdd5871-config-data\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.550023 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jhsl\" (UniqueName: \"kubernetes.io/projected/8dc52345-93e6-4c65-9617-f4dc9bdd5871-kube-api-access-5jhsl\") pod \"glance-default-external-api-0\" (UID: \"8dc52345-93e6-4c65-9617-f4dc9bdd5871\") " pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.613350 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.633831 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-httpd-run\") pod \"36d355ac-ed74-4343-9ddd-e36e5166df83\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.633895 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-config-data\") pod \"36d355ac-ed74-4343-9ddd-e36e5166df83\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.633950 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-ceph\") pod \"36d355ac-ed74-4343-9ddd-e36e5166df83\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.633999 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwx66\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-kube-api-access-jwx66\") pod \"36d355ac-ed74-4343-9ddd-e36e5166df83\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.634029 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-scripts\") pod \"36d355ac-ed74-4343-9ddd-e36e5166df83\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.634072 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-combined-ca-bundle\") pod \"36d355ac-ed74-4343-9ddd-e36e5166df83\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.634136 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-logs\") pod \"36d355ac-ed74-4343-9ddd-e36e5166df83\" (UID: \"36d355ac-ed74-4343-9ddd-e36e5166df83\") " Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.634292 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "36d355ac-ed74-4343-9ddd-e36e5166df83" (UID: "36d355ac-ed74-4343-9ddd-e36e5166df83"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.634868 4816 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.634924 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-logs" (OuterVolumeSpecName: "logs") pod "36d355ac-ed74-4343-9ddd-e36e5166df83" (UID: "36d355ac-ed74-4343-9ddd-e36e5166df83"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.654890 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-kube-api-access-jwx66" (OuterVolumeSpecName: "kube-api-access-jwx66") pod "36d355ac-ed74-4343-9ddd-e36e5166df83" (UID: "36d355ac-ed74-4343-9ddd-e36e5166df83"). InnerVolumeSpecName "kube-api-access-jwx66". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.655829 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-scripts" (OuterVolumeSpecName: "scripts") pod "36d355ac-ed74-4343-9ddd-e36e5166df83" (UID: "36d355ac-ed74-4343-9ddd-e36e5166df83"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.658238 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-ceph" (OuterVolumeSpecName: "ceph") pod "36d355ac-ed74-4343-9ddd-e36e5166df83" (UID: "36d355ac-ed74-4343-9ddd-e36e5166df83"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.678282 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.684916 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36d355ac-ed74-4343-9ddd-e36e5166df83" (UID: "36d355ac-ed74-4343-9ddd-e36e5166df83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.712011 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-config-data" (OuterVolumeSpecName: "config-data") pod "36d355ac-ed74-4343-9ddd-e36e5166df83" (UID: "36d355ac-ed74-4343-9ddd-e36e5166df83"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.740365 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.740410 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.740426 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwx66\" (UniqueName: \"kubernetes.io/projected/36d355ac-ed74-4343-9ddd-e36e5166df83-kube-api-access-jwx66\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.740441 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.740457 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d355ac-ed74-4343-9ddd-e36e5166df83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:29 crc kubenswrapper[4816]: I0216 14:40:29.740470 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36d355ac-ed74-4343-9ddd-e36e5166df83-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.206014 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36d355ac-ed74-4343-9ddd-e36e5166df83","Type":"ContainerDied","Data":"d447f4dd67f02dfce175168a28c1762c39d9b8959ad7c930435bc720f8ea4136"} Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.206096 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.206315 4816 scope.go:117] "RemoveContainer" containerID="45901d657d087e3bd7cf2781c50be19b30636d3b0ad263acb29ec6c8e41ea381" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.256555 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.282741 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.283274 4816 scope.go:117] "RemoveContainer" containerID="43413c47567c8111914ba4a1037a4cc0f5821d9898526a33e2c41c621a102d49" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.327731 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:40:30 crc kubenswrapper[4816]: E0216 14:40:30.328280 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-httpd" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.328308 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-httpd" Feb 16 14:40:30 crc kubenswrapper[4816]: E0216 14:40:30.328333 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-log" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.328343 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-log" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.328617 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-log" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.328700 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" containerName="glance-httpd" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.330007 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.333211 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.345186 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.352531 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.352587 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eea609be-3e59-43a4-8a67-5c3f5a427489-logs\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.352635 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w57bk\" (UniqueName: \"kubernetes.io/projected/eea609be-3e59-43a4-8a67-5c3f5a427489-kube-api-access-w57bk\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.352700 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.352738 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eea609be-3e59-43a4-8a67-5c3f5a427489-ceph\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.352801 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.352900 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eea609be-3e59-43a4-8a67-5c3f5a427489-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.358344 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.454394 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" 
(UniqueName: \"kubernetes.io/empty-dir/eea609be-3e59-43a4-8a67-5c3f5a427489-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.454460 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.454486 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eea609be-3e59-43a4-8a67-5c3f5a427489-logs\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.454520 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w57bk\" (UniqueName: \"kubernetes.io/projected/eea609be-3e59-43a4-8a67-5c3f5a427489-kube-api-access-w57bk\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.454549 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.454579 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eea609be-3e59-43a4-8a67-5c3f5a427489-ceph\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.454628 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.455036 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eea609be-3e59-43a4-8a67-5c3f5a427489-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.455278 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eea609be-3e59-43a4-8a67-5c3f5a427489-logs\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.464565 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-combined-ca-bundle\") pod \"glance-default-internal-api-0\" 
(UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.467943 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/eea609be-3e59-43a4-8a67-5c3f5a427489-ceph\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.475139 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.476018 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea609be-3e59-43a4-8a67-5c3f5a427489-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.476524 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w57bk\" (UniqueName: \"kubernetes.io/projected/eea609be-3e59-43a4-8a67-5c3f5a427489-kube-api-access-w57bk\") pod \"glance-default-internal-api-0\" (UID: \"eea609be-3e59-43a4-8a67-5c3f5a427489\") " pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.662513 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.662583 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.740055 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:30 crc kubenswrapper[4816]: I0216 14:40:30.766904 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:31 crc kubenswrapper[4816]: I0216 14:40:31.220893 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8dc52345-93e6-4c65-9617-f4dc9bdd5871","Type":"ContainerStarted","Data":"f05face88a2f1741fb3787a50708267ce22f74726b3c0dbeedee0c0c93c4e81a"} Feb 16 14:40:31 crc kubenswrapper[4816]: I0216 14:40:31.221458 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8dc52345-93e6-4c65-9617-f4dc9bdd5871","Type":"ContainerStarted","Data":"92c044b54662893a2848669af3b8e873d3c48efed00975f17b6f9ccbbbd31373"} Feb 16 14:40:31 crc kubenswrapper[4816]: I0216 14:40:31.344590 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 16 14:40:31 crc kubenswrapper[4816]: I0216 14:40:31.423543 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36d355ac-ed74-4343-9ddd-e36e5166df83" path="/var/lib/kubelet/pods/36d355ac-ed74-4343-9ddd-e36e5166df83/volumes" Feb 16 14:40:31 crc kubenswrapper[4816]: I0216 14:40:31.455688 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:31 crc kubenswrapper[4816]: I0216 14:40:31.455826 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:32 crc kubenswrapper[4816]: I0216 14:40:32.231921 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eea609be-3e59-43a4-8a67-5c3f5a427489","Type":"ContainerStarted","Data":"068b6778c6b50a8b73fc748d4ef8ad8e67048e7b546091bd9a9442094e3282ff"} Feb 16 14:40:32 crc kubenswrapper[4816]: I0216 14:40:32.232236 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eea609be-3e59-43a4-8a67-5c3f5a427489","Type":"ContainerStarted","Data":"c2fb207d6e66575a902b1a1a247680d5695f83331c592b0d36f20f3d92a4affa"} Feb 16 14:40:32 crc kubenswrapper[4816]: I0216 14:40:32.235321 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8dc52345-93e6-4c65-9617-f4dc9bdd5871","Type":"ContainerStarted","Data":"d1e2f97942015cc4a78c0753313452403d3f8d74db6a0cc494a81d4f8e8cfb38"} Feb 16 14:40:32 crc kubenswrapper[4816]: I0216 14:40:32.267941 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.26784574 podStartE2EDuration="3.26784574s" podCreationTimestamp="2026-02-16 14:40:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:40:32.25461629 +0000 UTC m=+5831.581330018" watchObservedRunningTime="2026-02-16 14:40:32.26784574 +0000 UTC m=+5831.594559468" Feb 16 14:40:33 crc kubenswrapper[4816]: I0216 14:40:33.248235 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eea609be-3e59-43a4-8a67-5c3f5a427489","Type":"ContainerStarted","Data":"d675bab077037a0db5d42c84b2485019d6e344983b3723dd252ebd52e3fe28a7"} Feb 16 14:40:33 crc kubenswrapper[4816]: I0216 14:40:33.266812 4816 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.266790887 podStartE2EDuration="3.266790887s" podCreationTimestamp="2026-02-16 14:40:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:40:33.2646836 +0000 UTC m=+5832.591397328" watchObservedRunningTime="2026-02-16 14:40:33.266790887 +0000 UTC m=+5832.593504615" Feb 16 14:40:36 crc kubenswrapper[4816]: I0216 14:40:36.057346 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8983-account-create-update-c6djt"] Feb 16 14:40:36 crc kubenswrapper[4816]: I0216 14:40:36.069336 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-z9gf4"] Feb 16 14:40:36 crc kubenswrapper[4816]: I0216 14:40:36.079849 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-z9gf4"] Feb 16 14:40:36 crc kubenswrapper[4816]: I0216 14:40:36.090158 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8983-account-create-update-c6djt"] Feb 16 14:40:37 crc kubenswrapper[4816]: I0216 14:40:37.423106 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="842ab8ea-5656-4bb5-8553-db49b36e48d2" path="/var/lib/kubelet/pods/842ab8ea-5656-4bb5-8553-db49b36e48d2/volumes" Feb 16 14:40:37 crc kubenswrapper[4816]: I0216 14:40:37.424256 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6a37227-d45e-4faa-99a5-9a5d9e8ed031" path="/var/lib/kubelet/pods/e6a37227-d45e-4faa-99a5-9a5d9e8ed031/volumes" Feb 16 14:40:39 crc kubenswrapper[4816]: I0216 14:40:39.679184 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 14:40:39 crc kubenswrapper[4816]: I0216 14:40:39.679515 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 16 14:40:39 crc kubenswrapper[4816]: I0216 14:40:39.721061 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 14:40:39 crc kubenswrapper[4816]: I0216 14:40:39.746210 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 16 14:40:40 crc kubenswrapper[4816]: I0216 14:40:40.329102 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 14:40:40 crc kubenswrapper[4816]: I0216 14:40:40.329965 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 16 14:40:40 crc kubenswrapper[4816]: I0216 14:40:40.665157 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6b9f4ffc55-cnndq" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Feb 16 14:40:40 crc kubenswrapper[4816]: I0216 14:40:40.740467 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:40 crc kubenswrapper[4816]: I0216 14:40:40.740561 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:40 crc 
kubenswrapper[4816]: I0216 14:40:40.787893 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:40 crc kubenswrapper[4816]: I0216 14:40:40.798462 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:41 crc kubenswrapper[4816]: I0216 14:40:41.044970 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-k45zc"] Feb 16 14:40:41 crc kubenswrapper[4816]: I0216 14:40:41.056639 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-k45zc"] Feb 16 14:40:41 crc kubenswrapper[4816]: I0216 14:40:41.336017 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:41 crc kubenswrapper[4816]: I0216 14:40:41.336060 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:41 crc kubenswrapper[4816]: I0216 14:40:41.410014 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15fb76bc-c799-466a-9a76-19316fa40857" path="/var/lib/kubelet/pods/15fb76bc-c799-466a-9a76-19316fa40857/volumes" Feb 16 14:40:41 crc kubenswrapper[4816]: I0216 14:40:41.432146 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8bdf5995c-wjvtc" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.110:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.110:8080: connect: connection refused" Feb 16 14:40:42 crc kubenswrapper[4816]: I0216 14:40:42.345464 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 14:40:42 crc kubenswrapper[4816]: I0216 14:40:42.345736 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 14:40:42 crc kubenswrapper[4816]: I0216 14:40:42.577967 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 14:40:42 crc kubenswrapper[4816]: I0216 14:40:42.581183 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 16 14:40:43 crc kubenswrapper[4816]: I0216 14:40:43.389281 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:43 crc kubenswrapper[4816]: I0216 14:40:43.389674 4816 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 16 14:40:43 crc kubenswrapper[4816]: I0216 14:40:43.390565 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 16 14:40:52 crc kubenswrapper[4816]: I0216 14:40:52.458124 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:53 crc kubenswrapper[4816]: I0216 14:40:53.350538 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:54 crc kubenswrapper[4816]: I0216 14:40:54.185879 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:40:55 crc kubenswrapper[4816]: I0216 14:40:55.092364 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:40:55 crc 
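Annotation: the "Probe failed" lines show the horizon startup and readiness probes doing an HTTP GET against the dashboard login page on port 8080 and getting connection refused while the container warms up. A hedged reconstruction of that probe as a corev1.Probe; the path and port are from the log, while PeriodSeconds and FailureThreshold are assumptions (the roughly 10s spacing of the failures suggests the period):

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func horizonProbe() *corev1.Probe {
	return &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path: "/dashboard/auth/login/?next=/dashboard/",
				Port: intstr.FromInt(8080),
			},
		},
		PeriodSeconds:    10, // assumed
		FailureThreshold: 6,  // assumed
	}
}

func main() {
	fmt.Println(horizonProbe().HTTPGet.Path)
}
```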
kubenswrapper[4816]: I0216 14:40:55.155773 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6b9f4ffc55-cnndq"] Feb 16 14:40:55 crc kubenswrapper[4816]: I0216 14:40:55.156037 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6b9f4ffc55-cnndq" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon-log" containerID="cri-o://84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b" gracePeriod=30 Feb 16 14:40:55 crc kubenswrapper[4816]: I0216 14:40:55.156138 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6b9f4ffc55-cnndq" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" containerID="cri-o://a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46" gracePeriod=30 Feb 16 14:40:58 crc kubenswrapper[4816]: I0216 14:40:58.517299 4816 generic.go:334] "Generic (PLEG): container finished" podID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerID="a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46" exitCode=0 Feb 16 14:40:58 crc kubenswrapper[4816]: I0216 14:40:58.517404 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f4ffc55-cnndq" event={"ID":"794feb25-7ead-47e2-a28e-3f3b6e2f209d","Type":"ContainerDied","Data":"a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46"} Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.527815 4816 generic.go:334] "Generic (PLEG): container finished" podID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerID="9ddf753d75606d9d7f73cfc80af627b5bb57d147537106dfcf13e0f7b9ea0861" exitCode=137 Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.528113 4816 generic.go:334] "Generic (PLEG): container finished" podID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerID="0de02733857398068aa3c24fa496511019506eedf3b9f229d6bf0d8d6619eff0" exitCode=137 Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.527911 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65987c9657-tmn5s" event={"ID":"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671","Type":"ContainerDied","Data":"9ddf753d75606d9d7f73cfc80af627b5bb57d147537106dfcf13e0f7b9ea0861"} Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.528150 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65987c9657-tmn5s" event={"ID":"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671","Type":"ContainerDied","Data":"0de02733857398068aa3c24fa496511019506eedf3b9f229d6bf0d8d6619eff0"} Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.528164 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65987c9657-tmn5s" event={"ID":"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671","Type":"ContainerDied","Data":"46a735b28ec04225f6deb974721182605914923d606d57bcaa3b856b0985a7d2"} Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.528174 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46a735b28ec04225f6deb974721182605914923d606d57bcaa3b856b0985a7d2" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.597974 4816 util.go:48] "No ready sandbox for pod can be found. 
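Annotation: the "Killing container with a grace period" lines are triggered by an API-side pod DELETE with a 30s grace period. Containers that exit on SIGTERM within the window report exitCode=0 (as horizon did above); stragglers are SIGKILLed when it expires and report 137, i.e. 128 + 9 (SIGKILL), as the horizon-65987c9657-tmn5s containers do. A sketch of the originating delete call, with the same client setup assumptions as the earlier watch sketch:

```go
package main

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// 30s matches gracePeriod=30 in the log lines above.
	grace := int64(30)
	if err := client.CoreV1().Pods("openstack").Delete(context.Background(),
		"horizon-6b9f4ffc55-cnndq",
		metav1.DeleteOptions{GracePeriodSeconds: &grace}); err != nil {
		panic(err)
	}
}
```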
Need to start a new one" pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.626413 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-logs\") pod \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.626510 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-scripts\") pod \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.626555 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-horizon-secret-key\") pod \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.626588 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-config-data\") pod \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.626659 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4bx8\" (UniqueName: \"kubernetes.io/projected/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-kube-api-access-k4bx8\") pod \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\" (UID: \"dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671\") " Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.632026 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-kube-api-access-k4bx8" (OuterVolumeSpecName: "kube-api-access-k4bx8") pod "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" (UID: "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671"). InnerVolumeSpecName "kube-api-access-k4bx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.634281 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-logs" (OuterVolumeSpecName: "logs") pod "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" (UID: "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.635679 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" (UID: "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.653345 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-scripts" (OuterVolumeSpecName: "scripts") pod "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" (UID: "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.660534 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-config-data" (OuterVolumeSpecName: "config-data") pod "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" (UID: "dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.729090 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.729128 4816 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.729140 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.729152 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4bx8\" (UniqueName: \"kubernetes.io/projected/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-kube-api-access-k4bx8\") on node \"crc\" DevicePath \"\"" Feb 16 14:40:59 crc kubenswrapper[4816]: I0216 14:40:59.729163 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:00 crc kubenswrapper[4816]: I0216 14:41:00.537353 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-65987c9657-tmn5s" Feb 16 14:41:00 crc kubenswrapper[4816]: I0216 14:41:00.577259 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-65987c9657-tmn5s"] Feb 16 14:41:00 crc kubenswrapper[4816]: I0216 14:41:00.591979 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-65987c9657-tmn5s"] Feb 16 14:41:00 crc kubenswrapper[4816]: I0216 14:41:00.662821 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6b9f4ffc55-cnndq" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Feb 16 14:41:01 crc kubenswrapper[4816]: I0216 14:41:01.411483 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" path="/var/lib/kubelet/pods/dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671/volumes" Feb 16 14:41:10 crc kubenswrapper[4816]: I0216 14:41:10.050039 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-3207-account-create-update-jlqpt"] Feb 16 14:41:10 crc kubenswrapper[4816]: I0216 14:41:10.060571 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-3207-account-create-update-jlqpt"] Feb 16 14:41:10 crc kubenswrapper[4816]: I0216 14:41:10.068870 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-bd6tx"] Feb 16 14:41:10 crc kubenswrapper[4816]: I0216 14:41:10.077479 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-bd6tx"] Feb 16 14:41:10 crc kubenswrapper[4816]: I0216 14:41:10.663119 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6b9f4ffc55-cnndq" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Feb 16 14:41:11 crc kubenswrapper[4816]: I0216 14:41:11.416707 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bca9d4f-055f-4668-915a-a4a35abf5be7" path="/var/lib/kubelet/pods/1bca9d4f-055f-4668-915a-a4a35abf5be7/volumes" Feb 16 14:41:11 crc kubenswrapper[4816]: I0216 14:41:11.417502 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ec5e35f-3654-40f7-97ad-29ef5097d445" path="/var/lib/kubelet/pods/2ec5e35f-3654-40f7-97ad-29ef5097d445/volumes" Feb 16 14:41:18 crc kubenswrapper[4816]: I0216 14:41:18.027571 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-ch7fv"] Feb 16 14:41:18 crc kubenswrapper[4816]: I0216 14:41:18.038470 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-ch7fv"] Feb 16 14:41:19 crc kubenswrapper[4816]: I0216 14:41:19.412103 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77203e34-09b4-4000-a542-96fbad2a4eba" path="/var/lib/kubelet/pods/77203e34-09b4-4000-a542-96fbad2a4eba/volumes" Feb 16 14:41:20 crc kubenswrapper[4816]: I0216 14:41:20.662734 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6b9f4ffc55-cnndq" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Feb 16 
14:41:20 crc kubenswrapper[4816]: I0216 14:41:20.663163 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.562009 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.578079 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794feb25-7ead-47e2-a28e-3f3b6e2f209d-logs\") pod \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.579253 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/794feb25-7ead-47e2-a28e-3f3b6e2f209d-logs" (OuterVolumeSpecName: "logs") pod "794feb25-7ead-47e2-a28e-3f3b6e2f209d" (UID: "794feb25-7ead-47e2-a28e-3f3b6e2f209d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.580993 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-scripts\") pod \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.581154 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/794feb25-7ead-47e2-a28e-3f3b6e2f209d-horizon-secret-key\") pod \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.581182 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vspzv\" (UniqueName: \"kubernetes.io/projected/794feb25-7ead-47e2-a28e-3f3b6e2f209d-kube-api-access-vspzv\") pod \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.581242 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-config-data\") pod \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\" (UID: \"794feb25-7ead-47e2-a28e-3f3b6e2f209d\") " Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.590153 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794feb25-7ead-47e2-a28e-3f3b6e2f209d-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.595210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794feb25-7ead-47e2-a28e-3f3b6e2f209d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "794feb25-7ead-47e2-a28e-3f3b6e2f209d" (UID: "794feb25-7ead-47e2-a28e-3f3b6e2f209d"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.600714 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/794feb25-7ead-47e2-a28e-3f3b6e2f209d-kube-api-access-vspzv" (OuterVolumeSpecName: "kube-api-access-vspzv") pod "794feb25-7ead-47e2-a28e-3f3b6e2f209d" (UID: "794feb25-7ead-47e2-a28e-3f3b6e2f209d"). InnerVolumeSpecName "kube-api-access-vspzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.621746 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-scripts" (OuterVolumeSpecName: "scripts") pod "794feb25-7ead-47e2-a28e-3f3b6e2f209d" (UID: "794feb25-7ead-47e2-a28e-3f3b6e2f209d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.632766 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-config-data" (OuterVolumeSpecName: "config-data") pod "794feb25-7ead-47e2-a28e-3f3b6e2f209d" (UID: "794feb25-7ead-47e2-a28e-3f3b6e2f209d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.692877 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.692931 4816 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/794feb25-7ead-47e2-a28e-3f3b6e2f209d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.692956 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vspzv\" (UniqueName: \"kubernetes.io/projected/794feb25-7ead-47e2-a28e-3f3b6e2f209d-kube-api-access-vspzv\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.692975 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/794feb25-7ead-47e2-a28e-3f3b6e2f209d-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.764936 4816 generic.go:334] "Generic (PLEG): container finished" podID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerID="84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b" exitCode=137 Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.764990 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f4ffc55-cnndq" event={"ID":"794feb25-7ead-47e2-a28e-3f3b6e2f209d","Type":"ContainerDied","Data":"84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b"} Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.765021 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f4ffc55-cnndq" event={"ID":"794feb25-7ead-47e2-a28e-3f3b6e2f209d","Type":"ContainerDied","Data":"7c06b6eb84f2a11008a8bbc99ca59c0acddbed829c45b25e952d4f0d3ebe2805"} Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.765030 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6b9f4ffc55-cnndq" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.765042 4816 scope.go:117] "RemoveContainer" containerID="a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.837366 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6b9f4ffc55-cnndq"] Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.848517 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6b9f4ffc55-cnndq"] Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.969291 4816 scope.go:117] "RemoveContainer" containerID="84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.994675 4816 scope.go:117] "RemoveContainer" containerID="a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46" Feb 16 14:41:25 crc kubenswrapper[4816]: E0216 14:41:25.995325 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46\": container with ID starting with a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46 not found: ID does not exist" containerID="a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.995365 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46"} err="failed to get container status \"a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46\": rpc error: code = NotFound desc = could not find container \"a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46\": container with ID starting with a71672ce82f4f0982f259a4fff66b2070ea502f7d5ab397c4683cb614ef94b46 not found: ID does not exist" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.995402 4816 scope.go:117] "RemoveContainer" containerID="84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b" Feb 16 14:41:25 crc kubenswrapper[4816]: E0216 14:41:25.995799 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b\": container with ID starting with 84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b not found: ID does not exist" containerID="84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b" Feb 16 14:41:25 crc kubenswrapper[4816]: I0216 14:41:25.995828 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b"} err="failed to get container status \"84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b\": rpc error: code = NotFound desc = could not find container \"84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b\": container with ID starting with 84323a06bbde7c9b05094820d47e8434fdb06b50dc3eadb01fb09cf6710de54b not found: ID does not exist" Feb 16 14:41:27 crc kubenswrapper[4816]: I0216 14:41:27.408717 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" path="/var/lib/kubelet/pods/794feb25-7ead-47e2-a28e-3f3b6e2f209d/volumes" Feb 16 14:41:28 crc kubenswrapper[4816]: I0216 14:41:28.910777 4816 scope.go:117] 
"RemoveContainer" containerID="4d5b1ee8e39858dce810c13dde473b4519f86758c7c2e522083e7811a6a5d893" Feb 16 14:41:28 crc kubenswrapper[4816]: I0216 14:41:28.932870 4816 scope.go:117] "RemoveContainer" containerID="d1e5505c71cc4204e408a903b5c207ad34a0a8329930dd2ef11a033b9cf5dbb2" Feb 16 14:41:28 crc kubenswrapper[4816]: I0216 14:41:28.955983 4816 scope.go:117] "RemoveContainer" containerID="95ad177b2ea16b592352bf281ef9cfb4c8bcf81bb6fb1fa75e2eadf11d827fc2" Feb 16 14:41:28 crc kubenswrapper[4816]: I0216 14:41:28.979924 4816 scope.go:117] "RemoveContainer" containerID="ac814d592aca6efafcbb2056b1ebd67f9c8350f2058884344e2ce5010758b8f0" Feb 16 14:41:29 crc kubenswrapper[4816]: I0216 14:41:29.026513 4816 scope.go:117] "RemoveContainer" containerID="405bf524d4286d584d4870fdb842524905a41069c2513add28e4557071d1efec" Feb 16 14:41:29 crc kubenswrapper[4816]: I0216 14:41:29.077326 4816 scope.go:117] "RemoveContainer" containerID="9379f1cfeeb32e5dc1339f40981afc4d2b9c76015b27901908c083ef4d620b14" Feb 16 14:41:29 crc kubenswrapper[4816]: I0216 14:41:29.149523 4816 scope.go:117] "RemoveContainer" containerID="887a2181add8831400b68ef95898bb744445170a2e115b7be3df4a78e65e2730" Feb 16 14:41:29 crc kubenswrapper[4816]: I0216 14:41:29.171248 4816 scope.go:117] "RemoveContainer" containerID="d5477e971c03b576fbfcb0274378b2b57284409fe81d9b2edc43a2688022ff2f" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.331144 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-76546b4c8f-lxsz6"] Feb 16 14:41:37 crc kubenswrapper[4816]: E0216 14:41:37.333921 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon-log" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.334046 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon-log" Feb 16 14:41:37 crc kubenswrapper[4816]: E0216 14:41:37.334163 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.334244 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" Feb 16 14:41:37 crc kubenswrapper[4816]: E0216 14:41:37.334334 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon-log" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.334419 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon-log" Feb 16 14:41:37 crc kubenswrapper[4816]: E0216 14:41:37.334537 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.334626 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.334963 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.335088 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="794feb25-7ead-47e2-a28e-3f3b6e2f209d" containerName="horizon-log" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.336240 4816 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon-log" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.336367 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcc2c72c-8d1a-4d40-b1a9-7ce9874c4671" containerName="horizon" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.340823 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.350639 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76546b4c8f-lxsz6"] Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.449500 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-scripts\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.449576 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-horizon-secret-key\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.449720 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg56l\" (UniqueName: \"kubernetes.io/projected/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-kube-api-access-fg56l\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.450445 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-config-data\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.450611 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-logs\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.552532 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-config-data\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.552636 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-logs\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.552720 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-scripts\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.552787 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-horizon-secret-key\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.552810 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg56l\" (UniqueName: \"kubernetes.io/projected/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-kube-api-access-fg56l\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.553251 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-logs\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.554126 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-config-data\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.554173 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-scripts\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.559746 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-horizon-secret-key\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.576075 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg56l\" (UniqueName: \"kubernetes.io/projected/545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554-kube-api-access-fg56l\") pod \"horizon-76546b4c8f-lxsz6\" (UID: \"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554\") " pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:37 crc kubenswrapper[4816]: I0216 14:41:37.671574 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76546b4c8f-lxsz6" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.197618 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76546b4c8f-lxsz6"] Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.720566 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-ctdck"] Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.723732 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-ctdck" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.743898 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-ctdck"] Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.799357 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-e296-account-create-update-k4wgc"] Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.800785 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.806851 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.812030 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-e296-account-create-update-k4wgc"] Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.879184 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-operator-scripts\") pod \"heat-db-create-ctdck\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " pod="openstack/heat-db-create-ctdck" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.881096 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkk5c\" (UniqueName: \"kubernetes.io/projected/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-kube-api-access-vkk5c\") pod \"heat-db-create-ctdck\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " pod="openstack/heat-db-create-ctdck" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.887521 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76546b4c8f-lxsz6" event={"ID":"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554","Type":"ContainerStarted","Data":"0e0553b5abc6dfec8e430ab444a11b587b70afb9dec7da5e973a051c4b11bd9f"} Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.887562 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76546b4c8f-lxsz6" event={"ID":"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554","Type":"ContainerStarted","Data":"1ca09fdd83516a674e1088bb208b632b9523368f4278af3fb0f76a912dfda914"} Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.887572 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76546b4c8f-lxsz6" event={"ID":"545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554","Type":"ContainerStarted","Data":"ad5f5675caf919d0962a66d18f36309c77128c59b0261df92308df5f35d1d6e9"} Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.912065 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-76546b4c8f-lxsz6" podStartSLOduration=1.912047119 podStartE2EDuration="1.912047119s" podCreationTimestamp="2026-02-16 14:41:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:41:38.906122238 +0000 UTC m=+5898.232835976" watchObservedRunningTime="2026-02-16 14:41:38.912047119 +0000 UTC m=+5898.238760847" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.983363 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c1f2ea-7117-4626-be6c-739c2a374880-operator-scripts\") pod \"heat-e296-account-create-update-k4wgc\" (UID: 
\"43c1f2ea-7117-4626-be6c-739c2a374880\") " pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.983440 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp42x\" (UniqueName: \"kubernetes.io/projected/43c1f2ea-7117-4626-be6c-739c2a374880-kube-api-access-xp42x\") pod \"heat-e296-account-create-update-k4wgc\" (UID: \"43c1f2ea-7117-4626-be6c-739c2a374880\") " pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.983482 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-operator-scripts\") pod \"heat-db-create-ctdck\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " pod="openstack/heat-db-create-ctdck" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.983645 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkk5c\" (UniqueName: \"kubernetes.io/projected/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-kube-api-access-vkk5c\") pod \"heat-db-create-ctdck\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " pod="openstack/heat-db-create-ctdck" Feb 16 14:41:38 crc kubenswrapper[4816]: I0216 14:41:38.984771 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-operator-scripts\") pod \"heat-db-create-ctdck\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " pod="openstack/heat-db-create-ctdck" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.001543 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkk5c\" (UniqueName: \"kubernetes.io/projected/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-kube-api-access-vkk5c\") pod \"heat-db-create-ctdck\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " pod="openstack/heat-db-create-ctdck" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.048763 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-ctdck" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.094554 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c1f2ea-7117-4626-be6c-739c2a374880-operator-scripts\") pod \"heat-e296-account-create-update-k4wgc\" (UID: \"43c1f2ea-7117-4626-be6c-739c2a374880\") " pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.094642 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xp42x\" (UniqueName: \"kubernetes.io/projected/43c1f2ea-7117-4626-be6c-739c2a374880-kube-api-access-xp42x\") pod \"heat-e296-account-create-update-k4wgc\" (UID: \"43c1f2ea-7117-4626-be6c-739c2a374880\") " pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.095704 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c1f2ea-7117-4626-be6c-739c2a374880-operator-scripts\") pod \"heat-e296-account-create-update-k4wgc\" (UID: \"43c1f2ea-7117-4626-be6c-739c2a374880\") " pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.120085 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp42x\" (UniqueName: \"kubernetes.io/projected/43c1f2ea-7117-4626-be6c-739c2a374880-kube-api-access-xp42x\") pod \"heat-e296-account-create-update-k4wgc\" (UID: \"43c1f2ea-7117-4626-be6c-739c2a374880\") " pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.128487 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.614516 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-ctdck"] Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.679333 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-e296-account-create-update-k4wgc"] Feb 16 14:41:39 crc kubenswrapper[4816]: W0216 14:41:39.681942 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43c1f2ea_7117_4626_be6c_739c2a374880.slice/crio-d81a14d6a72eea69ca800911dcb206a4122cd72bc25fdbf27b41252c162be415 WatchSource:0}: Error finding container d81a14d6a72eea69ca800911dcb206a4122cd72bc25fdbf27b41252c162be415: Status 404 returned error can't find the container with id d81a14d6a72eea69ca800911dcb206a4122cd72bc25fdbf27b41252c162be415 Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.900041 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-ctdck" event={"ID":"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c","Type":"ContainerStarted","Data":"8ad79072d4889e8f9c632c743008998b0378f8dc66ad89d426c47608a286346c"} Feb 16 14:41:39 crc kubenswrapper[4816]: I0216 14:41:39.902029 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-e296-account-create-update-k4wgc" event={"ID":"43c1f2ea-7117-4626-be6c-739c2a374880","Type":"ContainerStarted","Data":"d81a14d6a72eea69ca800911dcb206a4122cd72bc25fdbf27b41252c162be415"} Feb 16 14:41:40 crc kubenswrapper[4816]: I0216 14:41:40.912823 4816 generic.go:334] "Generic (PLEG): container finished" podID="5a14c9cb-3e89-467d-aacf-4d3941cc0a5c" containerID="a93518da57cd504991169f0a1404404d12599b7618d21d0712da1a4a89d3efb6" exitCode=0 Feb 16 14:41:40 crc kubenswrapper[4816]: I0216 14:41:40.913076 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-ctdck" event={"ID":"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c","Type":"ContainerDied","Data":"a93518da57cd504991169f0a1404404d12599b7618d21d0712da1a4a89d3efb6"} Feb 16 14:41:40 crc kubenswrapper[4816]: I0216 14:41:40.915027 4816 generic.go:334] "Generic (PLEG): container finished" podID="43c1f2ea-7117-4626-be6c-739c2a374880" containerID="ec06ede898c773315bf6c3ae55a789b3b4b67906001ceed3571080eb9427f0a8" exitCode=0 Feb 16 14:41:40 crc kubenswrapper[4816]: I0216 14:41:40.915073 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-e296-account-create-update-k4wgc" event={"ID":"43c1f2ea-7117-4626-be6c-739c2a374880","Type":"ContainerDied","Data":"ec06ede898c773315bf6c3ae55a789b3b4b67906001ceed3571080eb9427f0a8"} Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.367547 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.376502 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-ctdck" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.401297 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-operator-scripts\") pod \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.401352 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkk5c\" (UniqueName: \"kubernetes.io/projected/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-kube-api-access-vkk5c\") pod \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\" (UID: \"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c\") " Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.401399 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c1f2ea-7117-4626-be6c-739c2a374880-operator-scripts\") pod \"43c1f2ea-7117-4626-be6c-739c2a374880\" (UID: \"43c1f2ea-7117-4626-be6c-739c2a374880\") " Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.401439 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xp42x\" (UniqueName: \"kubernetes.io/projected/43c1f2ea-7117-4626-be6c-739c2a374880-kube-api-access-xp42x\") pod \"43c1f2ea-7117-4626-be6c-739c2a374880\" (UID: \"43c1f2ea-7117-4626-be6c-739c2a374880\") " Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.403384 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5a14c9cb-3e89-467d-aacf-4d3941cc0a5c" (UID: "5a14c9cb-3e89-467d-aacf-4d3941cc0a5c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.407724 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43c1f2ea-7117-4626-be6c-739c2a374880-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43c1f2ea-7117-4626-be6c-739c2a374880" (UID: "43c1f2ea-7117-4626-be6c-739c2a374880"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.413567 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43c1f2ea-7117-4626-be6c-739c2a374880-kube-api-access-xp42x" (OuterVolumeSpecName: "kube-api-access-xp42x") pod "43c1f2ea-7117-4626-be6c-739c2a374880" (UID: "43c1f2ea-7117-4626-be6c-739c2a374880"). InnerVolumeSpecName "kube-api-access-xp42x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.426594 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-kube-api-access-vkk5c" (OuterVolumeSpecName: "kube-api-access-vkk5c") pod "5a14c9cb-3e89-467d-aacf-4d3941cc0a5c" (UID: "5a14c9cb-3e89-467d-aacf-4d3941cc0a5c"). InnerVolumeSpecName "kube-api-access-vkk5c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.511371 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.520775 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkk5c\" (UniqueName: \"kubernetes.io/projected/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c-kube-api-access-vkk5c\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.520851 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c1f2ea-7117-4626-be6c-739c2a374880-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.520881 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xp42x\" (UniqueName: \"kubernetes.io/projected/43c1f2ea-7117-4626-be6c-739c2a374880-kube-api-access-xp42x\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.933678 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-e296-account-create-update-k4wgc" event={"ID":"43c1f2ea-7117-4626-be6c-739c2a374880","Type":"ContainerDied","Data":"d81a14d6a72eea69ca800911dcb206a4122cd72bc25fdbf27b41252c162be415"} Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.934045 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-e296-account-create-update-k4wgc" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.934192 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d81a14d6a72eea69ca800911dcb206a4122cd72bc25fdbf27b41252c162be415" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.935692 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-ctdck" event={"ID":"5a14c9cb-3e89-467d-aacf-4d3941cc0a5c","Type":"ContainerDied","Data":"8ad79072d4889e8f9c632c743008998b0378f8dc66ad89d426c47608a286346c"} Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.935725 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-ctdck" Feb 16 14:41:42 crc kubenswrapper[4816]: I0216 14:41:42.935736 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ad79072d4889e8f9c632c743008998b0378f8dc66ad89d426c47608a286346c" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.919826 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-t2889"] Feb 16 14:41:43 crc kubenswrapper[4816]: E0216 14:41:43.920701 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a14c9cb-3e89-467d-aacf-4d3941cc0a5c" containerName="mariadb-database-create" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.920726 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a14c9cb-3e89-467d-aacf-4d3941cc0a5c" containerName="mariadb-database-create" Feb 16 14:41:43 crc kubenswrapper[4816]: E0216 14:41:43.920759 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c1f2ea-7117-4626-be6c-739c2a374880" containerName="mariadb-account-create-update" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.920767 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c1f2ea-7117-4626-be6c-739c2a374880" containerName="mariadb-account-create-update" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.920984 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c1f2ea-7117-4626-be6c-739c2a374880" containerName="mariadb-account-create-update" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.921006 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a14c9cb-3e89-467d-aacf-4d3941cc0a5c" containerName="mariadb-database-create" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.921843 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-t2889" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.925221 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-bwtpl" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.925856 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.933128 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-t2889"] Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.953770 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-config-data\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.953991 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-combined-ca-bundle\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" Feb 16 14:41:43 crc kubenswrapper[4816]: I0216 14:41:43.954290 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjdsv\" (UniqueName: \"kubernetes.io/projected/8d3c380d-f02f-42ef-95e0-0188dabec0f1-kube-api-access-pjdsv\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.056777 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjdsv\" (UniqueName: \"kubernetes.io/projected/8d3c380d-f02f-42ef-95e0-0188dabec0f1-kube-api-access-pjdsv\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.056833 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-config-data\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.056921 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-combined-ca-bundle\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.070254 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-combined-ca-bundle\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.072972 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-config-data\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889" 
Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.074173 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjdsv\" (UniqueName: \"kubernetes.io/projected/8d3c380d-f02f-42ef-95e0-0188dabec0f1-kube-api-access-pjdsv\") pod \"heat-db-sync-t2889\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " pod="openstack/heat-db-sync-t2889"
Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.259635 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-t2889"
Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.755240 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.757120 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-t2889"]
Feb 16 14:41:44 crc kubenswrapper[4816]: I0216 14:41:44.965310 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t2889" event={"ID":"8d3c380d-f02f-42ef-95e0-0188dabec0f1","Type":"ContainerStarted","Data":"d64d3709fe5c564ec02344937459a1eeff0befbd4b12e350dbfbc5e17367cd45"}
Feb 16 14:41:47 crc kubenswrapper[4816]: I0216 14:41:47.671711 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-76546b4c8f-lxsz6"
Feb 16 14:41:47 crc kubenswrapper[4816]: I0216 14:41:47.672062 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-76546b4c8f-lxsz6"
Feb 16 14:41:53 crc kubenswrapper[4816]: I0216 14:41:53.047717 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t2889" event={"ID":"8d3c380d-f02f-42ef-95e0-0188dabec0f1","Type":"ContainerStarted","Data":"3ea9e664aff27f834c8adc935c0ed68cbb3762177c75292fd2a58335230380ca"}
Feb 16 14:41:53 crc kubenswrapper[4816]: I0216 14:41:53.076131 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-t2889" podStartSLOduration=2.886954108 podStartE2EDuration="10.076109879s" podCreationTimestamp="2026-02-16 14:41:43 +0000 UTC" firstStartedPulling="2026-02-16 14:41:44.75484082 +0000 UTC m=+5904.081554568" lastFinishedPulling="2026-02-16 14:41:51.943996611 +0000 UTC m=+5911.270710339" observedRunningTime="2026-02-16 14:41:53.065014297 +0000 UTC m=+5912.391728025" watchObservedRunningTime="2026-02-16 14:41:53.076109879 +0000 UTC m=+5912.402823607"
Feb 16 14:41:55 crc kubenswrapper[4816]: I0216 14:41:55.164736 4816 generic.go:334] "Generic (PLEG): container finished" podID="8d3c380d-f02f-42ef-95e0-0188dabec0f1" containerID="3ea9e664aff27f834c8adc935c0ed68cbb3762177c75292fd2a58335230380ca" exitCode=0
Feb 16 14:41:55 crc kubenswrapper[4816]: I0216 14:41:55.164839 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t2889" event={"ID":"8d3c380d-f02f-42ef-95e0-0188dabec0f1","Type":"ContainerDied","Data":"3ea9e664aff27f834c8adc935c0ed68cbb3762177c75292fd2a58335230380ca"}
Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.564818 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-t2889"
Need to start a new one" pod="openstack/heat-db-sync-t2889" Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.659718 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-config-data\") pod \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.660120 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-combined-ca-bundle\") pod \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.660237 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjdsv\" (UniqueName: \"kubernetes.io/projected/8d3c380d-f02f-42ef-95e0-0188dabec0f1-kube-api-access-pjdsv\") pod \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\" (UID: \"8d3c380d-f02f-42ef-95e0-0188dabec0f1\") " Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.668331 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d3c380d-f02f-42ef-95e0-0188dabec0f1-kube-api-access-pjdsv" (OuterVolumeSpecName: "kube-api-access-pjdsv") pod "8d3c380d-f02f-42ef-95e0-0188dabec0f1" (UID: "8d3c380d-f02f-42ef-95e0-0188dabec0f1"). InnerVolumeSpecName "kube-api-access-pjdsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.691972 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d3c380d-f02f-42ef-95e0-0188dabec0f1" (UID: "8d3c380d-f02f-42ef-95e0-0188dabec0f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.763145 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.763224 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjdsv\" (UniqueName: \"kubernetes.io/projected/8d3c380d-f02f-42ef-95e0-0188dabec0f1-kube-api-access-pjdsv\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.778222 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-config-data" (OuterVolumeSpecName: "config-data") pod "8d3c380d-f02f-42ef-95e0-0188dabec0f1" (UID: "8d3c380d-f02f-42ef-95e0-0188dabec0f1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:41:56 crc kubenswrapper[4816]: I0216 14:41:56.864752 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3c380d-f02f-42ef-95e0-0188dabec0f1-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:41:57 crc kubenswrapper[4816]: I0216 14:41:57.184502 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-t2889" event={"ID":"8d3c380d-f02f-42ef-95e0-0188dabec0f1","Type":"ContainerDied","Data":"d64d3709fe5c564ec02344937459a1eeff0befbd4b12e350dbfbc5e17367cd45"} Feb 16 14:41:57 crc kubenswrapper[4816]: I0216 14:41:57.184552 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d64d3709fe5c564ec02344937459a1eeff0befbd4b12e350dbfbc5e17367cd45" Feb 16 14:41:57 crc kubenswrapper[4816]: I0216 14:41:57.184577 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-t2889" Feb 16 14:41:57 crc kubenswrapper[4816]: I0216 14:41:57.674172 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-76546b4c8f-lxsz6" podUID="545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.113:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.113:8080: connect: connection refused" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.415643 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-67fc45c8f8-s7bjc"] Feb 16 14:41:58 crc kubenswrapper[4816]: E0216 14:41:58.424139 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3c380d-f02f-42ef-95e0-0188dabec0f1" containerName="heat-db-sync" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.424415 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3c380d-f02f-42ef-95e0-0188dabec0f1" containerName="heat-db-sync" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.424799 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d3c380d-f02f-42ef-95e0-0188dabec0f1" containerName="heat-db-sync" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.425791 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.435297 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.435541 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-bwtpl" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.435741 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.436445 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-6f897687c8-lnh6w"] Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.438137 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.445032 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.449157 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-6f897687c8-lnh6w"] Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.462449 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-67fc45c8f8-s7bjc"] Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494227 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-config-data\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494281 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-combined-ca-bundle\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494309 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9k8n\" (UniqueName: \"kubernetes.io/projected/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-kube-api-access-l9k8n\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494356 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-config-data-custom\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494412 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-config-data-custom\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494438 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6d5t\" (UniqueName: \"kubernetes.io/projected/6bae665e-4901-41cf-bc7e-2b47de9b6429-kube-api-access-t6d5t\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494460 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-config-data\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.494477 4816 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-combined-ca-bundle\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.569969 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-5cfbdfb845-kzxzs"] Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.571325 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.574012 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.588255 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-5cfbdfb845-kzxzs"] Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.603758 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-config-data\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.603861 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-combined-ca-bundle\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.603906 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9k8n\" (UniqueName: \"kubernetes.io/projected/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-kube-api-access-l9k8n\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.603962 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-config-data-custom\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.604042 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-config-data-custom\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.604098 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6d5t\" (UniqueName: \"kubernetes.io/projected/6bae665e-4901-41cf-bc7e-2b47de9b6429-kube-api-access-t6d5t\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.604143 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-config-data\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.604162 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-combined-ca-bundle\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.615351 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-combined-ca-bundle\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.625229 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-combined-ca-bundle\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.628735 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-config-data-custom\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.629634 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9k8n\" (UniqueName: \"kubernetes.io/projected/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-kube-api-access-l9k8n\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.631972 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bae665e-4901-41cf-bc7e-2b47de9b6429-config-data\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.634356 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6d5t\" (UniqueName: \"kubernetes.io/projected/6bae665e-4901-41cf-bc7e-2b47de9b6429-kube-api-access-t6d5t\") pod \"heat-api-6f897687c8-lnh6w\" (UID: \"6bae665e-4901-41cf-bc7e-2b47de9b6429\") " pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.639962 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-config-data\") pod \"heat-engine-67fc45c8f8-s7bjc\" (UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.646012 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fcfe17aa-f8a3-46c2-9e93-6b9b147e415a-config-data-custom\") pod \"heat-engine-67fc45c8f8-s7bjc\" 
(UID: \"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a\") " pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.714023 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-config-data\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.714420 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-combined-ca-bundle\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.714591 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-config-data-custom\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.714811 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sbwc\" (UniqueName: \"kubernetes.io/projected/a1efa856-88c3-4311-9634-7370c2a2db47-kube-api-access-2sbwc\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.764675 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.779518 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-6f897687c8-lnh6w" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.817438 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-combined-ca-bundle\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.817840 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-config-data-custom\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.817883 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sbwc\" (UniqueName: \"kubernetes.io/projected/a1efa856-88c3-4311-9634-7370c2a2db47-kube-api-access-2sbwc\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.817909 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-config-data\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.824608 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-combined-ca-bundle\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.841275 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-config-data\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.841827 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1efa856-88c3-4311-9634-7370c2a2db47-config-data-custom\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.844955 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sbwc\" (UniqueName: \"kubernetes.io/projected/a1efa856-88c3-4311-9634-7370c2a2db47-kube-api-access-2sbwc\") pod \"heat-cfnapi-5cfbdfb845-kzxzs\" (UID: \"a1efa856-88c3-4311-9634-7370c2a2db47\") " pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:58 crc kubenswrapper[4816]: I0216 14:41:58.896224 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" Feb 16 14:41:59 crc kubenswrapper[4816]: I0216 14:41:59.326214 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-67fc45c8f8-s7bjc"] Feb 16 14:41:59 crc kubenswrapper[4816]: I0216 14:41:59.422397 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-6f897687c8-lnh6w"] Feb 16 14:41:59 crc kubenswrapper[4816]: I0216 14:41:59.545962 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-5cfbdfb845-kzxzs"] Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.049220 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-a715-account-create-update-2tzsx"] Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.058005 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-a715-account-create-update-2tzsx"] Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.068192 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-9jpzt"] Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.077126 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-9jpzt"] Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.260842 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-67fc45c8f8-s7bjc" event={"ID":"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a","Type":"ContainerStarted","Data":"425988b6158544219abd8cd4d25681a148d1f52fd60f6b8d776cb224e5608763"} Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.260888 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-67fc45c8f8-s7bjc" event={"ID":"fcfe17aa-f8a3-46c2-9e93-6b9b147e415a","Type":"ContainerStarted","Data":"ddfee4086dc7f42e75aa5498a6929b542ee76254c48eb4678571af48bf925ec7"} Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.262145 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-67fc45c8f8-s7bjc" Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.283170 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" event={"ID":"a1efa856-88c3-4311-9634-7370c2a2db47","Type":"ContainerStarted","Data":"5bb54d252f6129792b2fb23e0f3a670419b264c76c653f9e03ed37ffc1c41be6"} Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.293632 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-67fc45c8f8-s7bjc" podStartSLOduration=2.293613273 podStartE2EDuration="2.293613273s" podCreationTimestamp="2026-02-16 14:41:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:42:00.291717902 +0000 UTC m=+5919.618431650" watchObservedRunningTime="2026-02-16 14:42:00.293613273 +0000 UTC m=+5919.620327001" Feb 16 14:42:00 crc kubenswrapper[4816]: I0216 14:42:00.311957 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6f897687c8-lnh6w" event={"ID":"6bae665e-4901-41cf-bc7e-2b47de9b6429","Type":"ContainerStarted","Data":"ca705e875c922b6e547e2ca80aa653f74cf76fbbfb1cc3c31bdb24f6682792b9"} Feb 16 14:42:01 crc kubenswrapper[4816]: I0216 14:42:01.443432 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f480b29-6c4f-4fb6-806d-9f505e3377f9" path="/var/lib/kubelet/pods/0f480b29-6c4f-4fb6-806d-9f505e3377f9/volumes" Feb 16 14:42:01 crc kubenswrapper[4816]: I0216 14:42:01.444933 4816 
Feb 16 14:42:02 crc kubenswrapper[4816]: I0216 14:42:02.435592 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" event={"ID":"a1efa856-88c3-4311-9634-7370c2a2db47","Type":"ContainerStarted","Data":"9842332a2c30e85402db2784fe3aeeb26d42256c4366b45eddc110c8f6a9bd8c"}
Feb 16 14:42:02 crc kubenswrapper[4816]: I0216 14:42:02.435944 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs"
Feb 16 14:42:02 crc kubenswrapper[4816]: I0216 14:42:02.446342 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6f897687c8-lnh6w" event={"ID":"6bae665e-4901-41cf-bc7e-2b47de9b6429","Type":"ContainerStarted","Data":"340b2298130e0bce4012f31023a9c6be3f9a436350a8dc2e699e43f9b884266c"}
Feb 16 14:42:02 crc kubenswrapper[4816]: I0216 14:42:02.447308 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-6f897687c8-lnh6w"
Feb 16 14:42:02 crc kubenswrapper[4816]: I0216 14:42:02.473521 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs" podStartSLOduration=2.3697578999999998 podStartE2EDuration="4.473500141s" podCreationTimestamp="2026-02-16 14:41:58 +0000 UTC" firstStartedPulling="2026-02-16 14:41:59.558716456 +0000 UTC m=+5918.885430184" lastFinishedPulling="2026-02-16 14:42:01.662458697 +0000 UTC m=+5920.989172425" observedRunningTime="2026-02-16 14:42:02.458152682 +0000 UTC m=+5921.784866410" watchObservedRunningTime="2026-02-16 14:42:02.473500141 +0000 UTC m=+5921.800213859"
Feb 16 14:42:02 crc kubenswrapper[4816]: I0216 14:42:02.492852 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-6f897687c8-lnh6w" podStartSLOduration=2.260634795 podStartE2EDuration="4.492832258s" podCreationTimestamp="2026-02-16 14:41:58 +0000 UTC" firstStartedPulling="2026-02-16 14:41:59.428112655 +0000 UTC m=+5918.754826383" lastFinishedPulling="2026-02-16 14:42:01.660310118 +0000 UTC m=+5920.987023846" observedRunningTime="2026-02-16 14:42:02.480532662 +0000 UTC m=+5921.807246390" watchObservedRunningTime="2026-02-16 14:42:02.492832258 +0000 UTC m=+5921.819545986"
Feb 16 14:42:07 crc kubenswrapper[4816]: I0216 14:42:07.038146 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-zfnz9"]
Feb 16 14:42:07 crc kubenswrapper[4816]: I0216 14:42:07.051806 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-zfnz9"]
Feb 16 14:42:07 crc kubenswrapper[4816]: I0216 14:42:07.411988 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1910b7f-7be1-47c4-873b-80f10d60bd0d" path="/var/lib/kubelet/pods/b1910b7f-7be1-47c4-873b-80f10d60bd0d/volumes"
Feb 16 14:42:09 crc kubenswrapper[4816]: I0216 14:42:09.776580 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-76546b4c8f-lxsz6"
Feb 16 14:42:10 crc kubenswrapper[4816]: I0216 14:42:10.325961 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-6f897687c8-lnh6w"
Feb 16 14:42:10 crc kubenswrapper[4816]: I0216 14:42:10.462710 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-5cfbdfb845-kzxzs"
Feb 16 14:42:11 crc kubenswrapper[4816]: I0216 14:42:11.843594 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-76546b4c8f-lxsz6"
Feb 16 14:42:11 crc kubenswrapper[4816]: I0216 14:42:11.909258 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8bdf5995c-wjvtc"]
Feb 16 14:42:11 crc kubenswrapper[4816]: I0216 14:42:11.909538 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8bdf5995c-wjvtc" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon-log" containerID="cri-o://19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412" gracePeriod=30
Feb 16 14:42:11 crc kubenswrapper[4816]: I0216 14:42:11.909818 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8bdf5995c-wjvtc" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" containerID="cri-o://51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde" gracePeriod=30
Feb 16 14:42:15 crc kubenswrapper[4816]: I0216 14:42:15.648612 4816 generic.go:334] "Generic (PLEG): container finished" podID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerID="51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde" exitCode=0
Feb 16 14:42:15 crc kubenswrapper[4816]: I0216 14:42:15.648694 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8bdf5995c-wjvtc" event={"ID":"f775b181-bb2d-408d-81ac-2bdc2046184c","Type":"ContainerDied","Data":"51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde"}
Feb 16 14:42:18 crc kubenswrapper[4816]: I0216 14:42:18.792545 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-67fc45c8f8-s7bjc"
Feb 16 14:42:21 crc kubenswrapper[4816]: I0216 14:42:21.432049 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8bdf5995c-wjvtc" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.110:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.110:8080: connect: connection refused"
Feb 16 14:42:29 crc kubenswrapper[4816]: I0216 14:42:29.384893 4816 scope.go:117] "RemoveContainer" containerID="5bb65dda369f1cacdada31c4a52006663090e765d5da790935ffacd22c996e33"
Feb 16 14:42:29 crc kubenswrapper[4816]: I0216 14:42:29.417760 4816 scope.go:117] "RemoveContainer" containerID="24789091f3f1fa925ed35e52df49ae01a4b099ffe3f7414a6384f47ca468d0ed"
Feb 16 14:42:29 crc kubenswrapper[4816]: I0216 14:42:29.466558 4816 scope.go:117] "RemoveContainer" containerID="ac352d9ae6ade149ca9d3bda5c25baa31a2e4c90e6b386a1f2bb352eb18c6e76"
Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.021700 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg"]
Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.024581 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg"
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.029319 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.045181 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg"] Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.188091 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.188406 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-467km\" (UniqueName: \"kubernetes.io/projected/f31af8d4-0560-48c4-8471-8fc736b13844-kube-api-access-467km\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.188685 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.290576 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-467km\" (UniqueName: \"kubernetes.io/projected/f31af8d4-0560-48c4-8471-8fc736b13844-kube-api-access-467km\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.290760 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.290834 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.291498 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.291545 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.313036 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-467km\" (UniqueName: \"kubernetes.io/projected/f31af8d4-0560-48c4-8471-8fc736b13844-kube-api-access-467km\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.357126 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.431372 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8bdf5995c-wjvtc" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.110:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.110:8080: connect: connection refused" Feb 16 14:42:31 crc kubenswrapper[4816]: I0216 14:42:31.821346 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg"] Feb 16 14:42:32 crc kubenswrapper[4816]: I0216 14:42:32.821171 4816 generic.go:334] "Generic (PLEG): container finished" podID="f31af8d4-0560-48c4-8471-8fc736b13844" containerID="416b19f424a5fbe5afca18cd3e3d7e5053cb4eea1ad68578f2d11aa9304e1dbf" exitCode=0 Feb 16 14:42:32 crc kubenswrapper[4816]: I0216 14:42:32.821369 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" event={"ID":"f31af8d4-0560-48c4-8471-8fc736b13844","Type":"ContainerDied","Data":"416b19f424a5fbe5afca18cd3e3d7e5053cb4eea1ad68578f2d11aa9304e1dbf"} Feb 16 14:42:32 crc kubenswrapper[4816]: I0216 14:42:32.821507 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" event={"ID":"f31af8d4-0560-48c4-8471-8fc736b13844","Type":"ContainerStarted","Data":"82c63833160507aae108ac3945fcfa75e2b639c7bfd65d231ce880e687dd1d4c"} Feb 16 14:42:35 crc kubenswrapper[4816]: I0216 14:42:35.847777 4816 generic.go:334] "Generic (PLEG): container finished" podID="f31af8d4-0560-48c4-8471-8fc736b13844" containerID="fb75c0d88684c73ec9840f301469623d64ce7886919c2051cb0a942949065f9f" exitCode=0 Feb 16 14:42:35 crc kubenswrapper[4816]: I0216 14:42:35.847853 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" 
event={"ID":"f31af8d4-0560-48c4-8471-8fc736b13844","Type":"ContainerDied","Data":"fb75c0d88684c73ec9840f301469623d64ce7886919c2051cb0a942949065f9f"} Feb 16 14:42:36 crc kubenswrapper[4816]: I0216 14:42:36.859292 4816 generic.go:334] "Generic (PLEG): container finished" podID="f31af8d4-0560-48c4-8471-8fc736b13844" containerID="5072506dac5474e7c02e656d915b926ca60e1c7c7462945cee2625ecd87bcc40" exitCode=0 Feb 16 14:42:36 crc kubenswrapper[4816]: I0216 14:42:36.859384 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" event={"ID":"f31af8d4-0560-48c4-8471-8fc736b13844","Type":"ContainerDied","Data":"5072506dac5474e7c02e656d915b926ca60e1c7c7462945cee2625ecd87bcc40"} Feb 16 14:42:36 crc kubenswrapper[4816]: I0216 14:42:36.940712 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:42:36 crc kubenswrapper[4816]: I0216 14:42:36.940768 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:42:37 crc kubenswrapper[4816]: I0216 14:42:37.053594 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-lbnxq"] Feb 16 14:42:37 crc kubenswrapper[4816]: I0216 14:42:37.062507 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-1890-account-create-update-przbk"] Feb 16 14:42:37 crc kubenswrapper[4816]: I0216 14:42:37.070395 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-lbnxq"] Feb 16 14:42:37 crc kubenswrapper[4816]: I0216 14:42:37.079202 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-1890-account-create-update-przbk"] Feb 16 14:42:37 crc kubenswrapper[4816]: I0216 14:42:37.410495 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0612081d-442a-4110-8a23-8b5654e6a238" path="/var/lib/kubelet/pods/0612081d-442a-4110-8a23-8b5654e6a238/volumes" Feb 16 14:42:37 crc kubenswrapper[4816]: I0216 14:42:37.411675 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db51a0b5-f0ae-4ad6-907c-ccf15813df4f" path="/var/lib/kubelet/pods/db51a0b5-f0ae-4ad6-907c-ccf15813df4f/volumes" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.317338 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.441555 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-util\") pod \"f31af8d4-0560-48c4-8471-8fc736b13844\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.441975 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-467km\" (UniqueName: \"kubernetes.io/projected/f31af8d4-0560-48c4-8471-8fc736b13844-kube-api-access-467km\") pod \"f31af8d4-0560-48c4-8471-8fc736b13844\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.442004 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-bundle\") pod \"f31af8d4-0560-48c4-8471-8fc736b13844\" (UID: \"f31af8d4-0560-48c4-8471-8fc736b13844\") " Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.443807 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-bundle" (OuterVolumeSpecName: "bundle") pod "f31af8d4-0560-48c4-8471-8fc736b13844" (UID: "f31af8d4-0560-48c4-8471-8fc736b13844"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.446761 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f31af8d4-0560-48c4-8471-8fc736b13844-kube-api-access-467km" (OuterVolumeSpecName: "kube-api-access-467km") pod "f31af8d4-0560-48c4-8471-8fc736b13844" (UID: "f31af8d4-0560-48c4-8471-8fc736b13844"). InnerVolumeSpecName "kube-api-access-467km". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.452945 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-util" (OuterVolumeSpecName: "util") pod "f31af8d4-0560-48c4-8471-8fc736b13844" (UID: "f31af8d4-0560-48c4-8471-8fc736b13844"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.544985 4816 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-util\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.545015 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-467km\" (UniqueName: \"kubernetes.io/projected/f31af8d4-0560-48c4-8471-8fc736b13844-kube-api-access-467km\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.545029 4816 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f31af8d4-0560-48c4-8471-8fc736b13844-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.887172 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" event={"ID":"f31af8d4-0560-48c4-8471-8fc736b13844","Type":"ContainerDied","Data":"82c63833160507aae108ac3945fcfa75e2b639c7bfd65d231ce880e687dd1d4c"} Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.887208 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82c63833160507aae108ac3945fcfa75e2b639c7bfd65d231ce880e687dd1d4c" Feb 16 14:42:38 crc kubenswrapper[4816]: I0216 14:42:38.887238 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg" Feb 16 14:42:41 crc kubenswrapper[4816]: I0216 14:42:41.430744 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8bdf5995c-wjvtc" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.110:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.110:8080: connect: connection refused" Feb 16 14:42:41 crc kubenswrapper[4816]: I0216 14:42:41.431211 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.374343 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.529912 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-config-data\") pod \"f775b181-bb2d-408d-81ac-2bdc2046184c\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.530088 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f775b181-bb2d-408d-81ac-2bdc2046184c-logs\") pod \"f775b181-bb2d-408d-81ac-2bdc2046184c\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.530227 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-scripts\") pod \"f775b181-bb2d-408d-81ac-2bdc2046184c\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.530373 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f775b181-bb2d-408d-81ac-2bdc2046184c-horizon-secret-key\") pod \"f775b181-bb2d-408d-81ac-2bdc2046184c\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.530416 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkmnj\" (UniqueName: \"kubernetes.io/projected/f775b181-bb2d-408d-81ac-2bdc2046184c-kube-api-access-pkmnj\") pod \"f775b181-bb2d-408d-81ac-2bdc2046184c\" (UID: \"f775b181-bb2d-408d-81ac-2bdc2046184c\") " Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.531305 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f775b181-bb2d-408d-81ac-2bdc2046184c-logs" (OuterVolumeSpecName: "logs") pod "f775b181-bb2d-408d-81ac-2bdc2046184c" (UID: "f775b181-bb2d-408d-81ac-2bdc2046184c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.535514 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f775b181-bb2d-408d-81ac-2bdc2046184c-kube-api-access-pkmnj" (OuterVolumeSpecName: "kube-api-access-pkmnj") pod "f775b181-bb2d-408d-81ac-2bdc2046184c" (UID: "f775b181-bb2d-408d-81ac-2bdc2046184c"). InnerVolumeSpecName "kube-api-access-pkmnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.541816 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f775b181-bb2d-408d-81ac-2bdc2046184c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f775b181-bb2d-408d-81ac-2bdc2046184c" (UID: "f775b181-bb2d-408d-81ac-2bdc2046184c"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.555496 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-scripts" (OuterVolumeSpecName: "scripts") pod "f775b181-bb2d-408d-81ac-2bdc2046184c" (UID: "f775b181-bb2d-408d-81ac-2bdc2046184c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.558526 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-config-data" (OuterVolumeSpecName: "config-data") pod "f775b181-bb2d-408d-81ac-2bdc2046184c" (UID: "f775b181-bb2d-408d-81ac-2bdc2046184c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.633738 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.633776 4816 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f775b181-bb2d-408d-81ac-2bdc2046184c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.633792 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkmnj\" (UniqueName: \"kubernetes.io/projected/f775b181-bb2d-408d-81ac-2bdc2046184c-kube-api-access-pkmnj\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.633804 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f775b181-bb2d-408d-81ac-2bdc2046184c-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.633817 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f775b181-bb2d-408d-81ac-2bdc2046184c-logs\") on node \"crc\" DevicePath \"\"" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.929198 4816 generic.go:334] "Generic (PLEG): container finished" podID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerID="19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412" exitCode=137 Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.929251 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8bdf5995c-wjvtc" event={"ID":"f775b181-bb2d-408d-81ac-2bdc2046184c","Type":"ContainerDied","Data":"19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412"} Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.929296 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8bdf5995c-wjvtc" event={"ID":"f775b181-bb2d-408d-81ac-2bdc2046184c","Type":"ContainerDied","Data":"f137b95c7f16a315265ca3df23cef9f9bd7add3c4324cdc97c9fb201daab8742"} Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.929318 4816 scope.go:117] "RemoveContainer" containerID="51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde" Feb 16 14:42:42 crc kubenswrapper[4816]: I0216 14:42:42.929708 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8bdf5995c-wjvtc" Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.002735 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8bdf5995c-wjvtc"] Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.014758 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-8bdf5995c-wjvtc"] Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.152366 4816 scope.go:117] "RemoveContainer" containerID="19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412" Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.199890 4816 scope.go:117] "RemoveContainer" containerID="51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde" Feb 16 14:42:43 crc kubenswrapper[4816]: E0216 14:42:43.200511 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde\": container with ID starting with 51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde not found: ID does not exist" containerID="51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde" Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.200674 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde"} err="failed to get container status \"51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde\": rpc error: code = NotFound desc = could not find container \"51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde\": container with ID starting with 51bb294b5d377f6b3b3d9e678dcc24637832cb0af2d6a1cad5d55a9d96688fde not found: ID does not exist" Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.200812 4816 scope.go:117] "RemoveContainer" containerID="19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412" Feb 16 14:42:43 crc kubenswrapper[4816]: E0216 14:42:43.201194 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412\": container with ID starting with 19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412 not found: ID does not exist" containerID="19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412" Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.201242 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412"} err="failed to get container status \"19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412\": rpc error: code = NotFound desc = could not find container \"19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412\": container with ID starting with 19a13c978ec47f15d8631fb552de019610c78b7a148d51666412d2cbe2646412 not found: ID does not exist" Feb 16 14:42:43 crc kubenswrapper[4816]: I0216 14:42:43.408917 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" path="/var/lib/kubelet/pods/f775b181-bb2d-408d-81ac-2bdc2046184c/volumes" Feb 16 14:42:44 crc kubenswrapper[4816]: I0216 14:42:44.048336 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-pqcs9"] Feb 16 14:42:44 crc kubenswrapper[4816]: I0216 14:42:44.069256 4816 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/placement-db-sync-pqcs9"] Feb 16 14:42:45 crc kubenswrapper[4816]: I0216 14:42:45.420796 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e541c102-5476-4214-9555-361d24cef7c9" path="/var/lib/kubelet/pods/e541c102-5476-4214-9555-361d24cef7c9/volumes" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.743916 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg"] Feb 16 14:42:49 crc kubenswrapper[4816]: E0216 14:42:49.744918 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f31af8d4-0560-48c4-8471-8fc736b13844" containerName="pull" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.744937 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f31af8d4-0560-48c4-8471-8fc736b13844" containerName="pull" Feb 16 14:42:49 crc kubenswrapper[4816]: E0216 14:42:49.744962 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f31af8d4-0560-48c4-8471-8fc736b13844" containerName="extract" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.744970 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f31af8d4-0560-48c4-8471-8fc736b13844" containerName="extract" Feb 16 14:42:49 crc kubenswrapper[4816]: E0216 14:42:49.744986 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.744993 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" Feb 16 14:42:49 crc kubenswrapper[4816]: E0216 14:42:49.745006 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon-log" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.745013 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon-log" Feb 16 14:42:49 crc kubenswrapper[4816]: E0216 14:42:49.745051 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f31af8d4-0560-48c4-8471-8fc736b13844" containerName="util" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.745137 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f31af8d4-0560-48c4-8471-8fc736b13844" containerName="util" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.745355 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f31af8d4-0560-48c4-8471-8fc736b13844" containerName="extract" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.745381 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon-log" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.745401 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f775b181-bb2d-408d-81ac-2bdc2046184c" containerName="horizon" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.746250 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.748271 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-wk7cb" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.748715 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.748722 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.764850 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg"] Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.792486 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6jxz\" (UniqueName: \"kubernetes.io/projected/dfea21df-03cb-4b66-be23-7d06f1036ac6-kube-api-access-n6jxz\") pod \"obo-prometheus-operator-68bc856cb9-wtmzg\" (UID: \"dfea21df-03cb-4b66-be23-7d06f1036ac6\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.880317 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7"] Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.881849 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.888835 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-7x5qh" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.888906 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm"] Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.889525 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.890583 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.896567 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6jxz\" (UniqueName: \"kubernetes.io/projected/dfea21df-03cb-4b66-be23-7d06f1036ac6-kube-api-access-n6jxz\") pod \"obo-prometheus-operator-68bc856cb9-wtmzg\" (UID: \"dfea21df-03cb-4b66-be23-7d06f1036ac6\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.907207 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7"] Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.929194 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6jxz\" (UniqueName: \"kubernetes.io/projected/dfea21df-03cb-4b66-be23-7d06f1036ac6-kube-api-access-n6jxz\") pod \"obo-prometheus-operator-68bc856cb9-wtmzg\" (UID: \"dfea21df-03cb-4b66-be23-7d06f1036ac6\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.951880 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm"] Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.998767 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/52fc463e-620e-4f7b-94df-67a832835a06-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm\" (UID: \"52fc463e-620e-4f7b-94df-67a832835a06\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.998917 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/52fc463e-620e-4f7b-94df-67a832835a06-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm\" (UID: \"52fc463e-620e-4f7b-94df-67a832835a06\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.998974 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0eb83954-2f81-4057-bbf7-c10ce7aba9fd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7\" (UID: \"0eb83954-2f81-4057-bbf7-c10ce7aba9fd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:49 crc kubenswrapper[4816]: I0216 14:42:49.999033 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0eb83954-2f81-4057-bbf7-c10ce7aba9fd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7\" (UID: \"0eb83954-2f81-4057-bbf7-c10ce7aba9fd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.073820 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-vtzp5"] Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.075524 4816 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.080056 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.080144 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-dfcvv" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.092768 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-vtzp5"] Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.100383 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0eb83954-2f81-4057-bbf7-c10ce7aba9fd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7\" (UID: \"0eb83954-2f81-4057-bbf7-c10ce7aba9fd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.100453 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0eb83954-2f81-4057-bbf7-c10ce7aba9fd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7\" (UID: \"0eb83954-2f81-4057-bbf7-c10ce7aba9fd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.100543 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b-observability-operator-tls\") pod \"observability-operator-59bdc8b94-vtzp5\" (UID: \"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b\") " pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.100600 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bgcw\" (UniqueName: \"kubernetes.io/projected/526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b-kube-api-access-6bgcw\") pod \"observability-operator-59bdc8b94-vtzp5\" (UID: \"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b\") " pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.100647 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/52fc463e-620e-4f7b-94df-67a832835a06-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm\" (UID: \"52fc463e-620e-4f7b-94df-67a832835a06\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.100716 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/52fc463e-620e-4f7b-94df-67a832835a06-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm\" (UID: \"52fc463e-620e-4f7b-94df-67a832835a06\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.108406 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/0eb83954-2f81-4057-bbf7-c10ce7aba9fd-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7\" (UID: \"0eb83954-2f81-4057-bbf7-c10ce7aba9fd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.108552 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/52fc463e-620e-4f7b-94df-67a832835a06-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm\" (UID: \"52fc463e-620e-4f7b-94df-67a832835a06\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.108924 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/52fc463e-620e-4f7b-94df-67a832835a06-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm\" (UID: \"52fc463e-620e-4f7b-94df-67a832835a06\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.108901 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.112728 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0eb83954-2f81-4057-bbf7-c10ce7aba9fd-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7\" (UID: \"0eb83954-2f81-4057-bbf7-c10ce7aba9fd\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.194460 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-z8fb9"] Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.196323 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.200304 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-5jfn6" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.203929 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bgcw\" (UniqueName: \"kubernetes.io/projected/526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b-kube-api-access-6bgcw\") pod \"observability-operator-59bdc8b94-vtzp5\" (UID: \"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b\") " pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.204274 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b-observability-operator-tls\") pod \"observability-operator-59bdc8b94-vtzp5\" (UID: \"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b\") " pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.209220 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.209808 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b-observability-operator-tls\") pod \"observability-operator-59bdc8b94-vtzp5\" (UID: \"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b\") " pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.213629 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-z8fb9"] Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.234460 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.258716 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bgcw\" (UniqueName: \"kubernetes.io/projected/526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b-kube-api-access-6bgcw\") pod \"observability-operator-59bdc8b94-vtzp5\" (UID: \"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b\") " pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.324869 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.325647 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ccea818a-55d6-46cf-b00b-a888623a16d6-openshift-service-ca\") pod \"perses-operator-5bf474d74f-z8fb9\" (UID: \"ccea818a-55d6-46cf-b00b-a888623a16d6\") " pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.325741 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn69z\" (UniqueName: \"kubernetes.io/projected/ccea818a-55d6-46cf-b00b-a888623a16d6-kube-api-access-pn69z\") pod \"perses-operator-5bf474d74f-z8fb9\" (UID: \"ccea818a-55d6-46cf-b00b-a888623a16d6\") " pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.429719 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ccea818a-55d6-46cf-b00b-a888623a16d6-openshift-service-ca\") pod \"perses-operator-5bf474d74f-z8fb9\" (UID: \"ccea818a-55d6-46cf-b00b-a888623a16d6\") " pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.429825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn69z\" (UniqueName: \"kubernetes.io/projected/ccea818a-55d6-46cf-b00b-a888623a16d6-kube-api-access-pn69z\") pod \"perses-operator-5bf474d74f-z8fb9\" (UID: \"ccea818a-55d6-46cf-b00b-a888623a16d6\") " pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.431665 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ccea818a-55d6-46cf-b00b-a888623a16d6-openshift-service-ca\") pod 
\"perses-operator-5bf474d74f-z8fb9\" (UID: \"ccea818a-55d6-46cf-b00b-a888623a16d6\") " pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.455796 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn69z\" (UniqueName: \"kubernetes.io/projected/ccea818a-55d6-46cf-b00b-a888623a16d6-kube-api-access-pn69z\") pod \"perses-operator-5bf474d74f-z8fb9\" (UID: \"ccea818a-55d6-46cf-b00b-a888623a16d6\") " pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:50 crc kubenswrapper[4816]: I0216 14:42:50.652960 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:42:51 crc kubenswrapper[4816]: I0216 14:42:51.364294 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg"] Feb 16 14:42:51 crc kubenswrapper[4816]: I0216 14:42:51.529316 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7"] Feb 16 14:42:51 crc kubenswrapper[4816]: W0216 14:42:51.537354 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0eb83954_2f81_4057_bbf7_c10ce7aba9fd.slice/crio-c48baee59b829b06f7dc390546ada91904c9021ca4cb6d40b60b8dbdabd2a462 WatchSource:0}: Error finding container c48baee59b829b06f7dc390546ada91904c9021ca4cb6d40b60b8dbdabd2a462: Status 404 returned error can't find the container with id c48baee59b829b06f7dc390546ada91904c9021ca4cb6d40b60b8dbdabd2a462 Feb 16 14:42:51 crc kubenswrapper[4816]: I0216 14:42:51.545355 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-vtzp5"] Feb 16 14:42:51 crc kubenswrapper[4816]: I0216 14:42:51.564346 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-z8fb9"] Feb 16 14:42:51 crc kubenswrapper[4816]: W0216 14:42:51.568909 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccea818a_55d6_46cf_b00b_a888623a16d6.slice/crio-e55c2d3b5fafd09ca7c121a1bba9b9e26b99fb0d0ccd997acd1b10c5ee10bd82 WatchSource:0}: Error finding container e55c2d3b5fafd09ca7c121a1bba9b9e26b99fb0d0ccd997acd1b10c5ee10bd82: Status 404 returned error can't find the container with id e55c2d3b5fafd09ca7c121a1bba9b9e26b99fb0d0ccd997acd1b10c5ee10bd82 Feb 16 14:42:51 crc kubenswrapper[4816]: I0216 14:42:51.688696 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm"] Feb 16 14:42:52 crc kubenswrapper[4816]: I0216 14:42:52.034061 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" event={"ID":"52fc463e-620e-4f7b-94df-67a832835a06","Type":"ContainerStarted","Data":"4a6a406c1d167a8ad1da5b36240d80f8c6b900039968e7c9653c1b936455860d"} Feb 16 14:42:52 crc kubenswrapper[4816]: I0216 14:42:52.051448 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" event={"ID":"dfea21df-03cb-4b66-be23-7d06f1036ac6","Type":"ContainerStarted","Data":"1a7d69bd40fc8243d37e957ae9df6460cdaaf04f3e7b8d7da29e500fb6bcc575"} Feb 16 14:42:52 crc kubenswrapper[4816]: I0216 14:42:52.054935 4816 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" event={"ID":"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b","Type":"ContainerStarted","Data":"1a1c2ee9bed9ecac33b73fef7b64f9f930fe56bc8b3cbc76acea43b26be62819"} Feb 16 14:42:52 crc kubenswrapper[4816]: I0216 14:42:52.058583 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" event={"ID":"ccea818a-55d6-46cf-b00b-a888623a16d6","Type":"ContainerStarted","Data":"e55c2d3b5fafd09ca7c121a1bba9b9e26b99fb0d0ccd997acd1b10c5ee10bd82"} Feb 16 14:42:52 crc kubenswrapper[4816]: I0216 14:42:52.060300 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" event={"ID":"0eb83954-2f81-4057-bbf7-c10ce7aba9fd","Type":"ContainerStarted","Data":"c48baee59b829b06f7dc390546ada91904c9021ca4cb6d40b60b8dbdabd2a462"} Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.289286 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" event={"ID":"0eb83954-2f81-4057-bbf7-c10ce7aba9fd","Type":"ContainerStarted","Data":"e1afa861b5cbcf50e8fd6205f9c8c157fb06f8ee77610bd59d852c3ce59f35b4"} Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.291121 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" event={"ID":"52fc463e-620e-4f7b-94df-67a832835a06","Type":"ContainerStarted","Data":"a61b47faf999d6efd877a60706c222ccfbf8d270ca43ef43bb33697307f365c8"} Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.292782 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" event={"ID":"dfea21df-03cb-4b66-be23-7d06f1036ac6","Type":"ContainerStarted","Data":"85891eebc6096120b86003f1fdbfcc8bbb2b3eef7cab167358ffe74b94c1533a"} Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.294717 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" event={"ID":"526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b","Type":"ContainerStarted","Data":"84bee6466d5bcb93838f5a7adca4d90a5e68b0f6ecce0479d60e1ff10f53f935"} Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.297234 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.301577 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.304607 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" event={"ID":"ccea818a-55d6-46cf-b00b-a888623a16d6","Type":"ContainerStarted","Data":"230539b57898bf9c29527b5e19e0425d1c4fea0ffa6590ffb3fdd60b98442838"} Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.304792 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.317384 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7" podStartSLOduration=3.669380572 podStartE2EDuration="16.317322453s" 
podCreationTimestamp="2026-02-16 14:42:49 +0000 UTC" firstStartedPulling="2026-02-16 14:42:51.544217602 +0000 UTC m=+5970.870931330" lastFinishedPulling="2026-02-16 14:43:04.192159473 +0000 UTC m=+5983.518873211" observedRunningTime="2026-02-16 14:43:05.307024611 +0000 UTC m=+5984.633738349" watchObservedRunningTime="2026-02-16 14:43:05.317322453 +0000 UTC m=+5984.644036181" Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.350197 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm" podStartSLOduration=3.819600097 podStartE2EDuration="16.350170218s" podCreationTimestamp="2026-02-16 14:42:49 +0000 UTC" firstStartedPulling="2026-02-16 14:42:51.693313087 +0000 UTC m=+5971.020026815" lastFinishedPulling="2026-02-16 14:43:04.223883188 +0000 UTC m=+5983.550596936" observedRunningTime="2026-02-16 14:43:05.33114646 +0000 UTC m=+5984.657860188" watchObservedRunningTime="2026-02-16 14:43:05.350170218 +0000 UTC m=+5984.676883946" Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.375591 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" podStartSLOduration=2.750486092 podStartE2EDuration="15.375560641s" podCreationTimestamp="2026-02-16 14:42:50 +0000 UTC" firstStartedPulling="2026-02-16 14:42:51.57277993 +0000 UTC m=+5970.899493658" lastFinishedPulling="2026-02-16 14:43:04.197854479 +0000 UTC m=+5983.524568207" observedRunningTime="2026-02-16 14:43:05.367144891 +0000 UTC m=+5984.693858619" watchObservedRunningTime="2026-02-16 14:43:05.375560641 +0000 UTC m=+5984.702274369" Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.478931 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-vtzp5" podStartSLOduration=2.7220535569999997 podStartE2EDuration="15.478903958s" podCreationTimestamp="2026-02-16 14:42:50 +0000 UTC" firstStartedPulling="2026-02-16 14:42:51.538312031 +0000 UTC m=+5970.865025759" lastFinishedPulling="2026-02-16 14:43:04.295162432 +0000 UTC m=+5983.621876160" observedRunningTime="2026-02-16 14:43:05.421538704 +0000 UTC m=+5984.748252432" watchObservedRunningTime="2026-02-16 14:43:05.478903958 +0000 UTC m=+5984.805617746" Feb 16 14:43:05 crc kubenswrapper[4816]: I0216 14:43:05.480633 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-wtmzg" podStartSLOduration=3.6785069 podStartE2EDuration="16.480621455s" podCreationTimestamp="2026-02-16 14:42:49 +0000 UTC" firstStartedPulling="2026-02-16 14:42:51.389866643 +0000 UTC m=+5970.716580371" lastFinishedPulling="2026-02-16 14:43:04.191981198 +0000 UTC m=+5983.518694926" observedRunningTime="2026-02-16 14:43:05.471095096 +0000 UTC m=+5984.797808824" watchObservedRunningTime="2026-02-16 14:43:05.480621455 +0000 UTC m=+5984.807335183" Feb 16 14:43:06 crc kubenswrapper[4816]: I0216 14:43:06.941095 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:43:06 crc kubenswrapper[4816]: I0216 14:43:06.941445 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:43:10 crc kubenswrapper[4816]: I0216 14:43:10.657322 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-z8fb9" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.128763 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.129611 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="28db99cc-bb32-4a18-a2a8-c8daafee6f8b" containerName="openstackclient" containerID="cri-o://23de134a2ee3fcf44c85a5ad41927b45621bb4b0672a9aea24be6b22ca8e14c4" gracePeriod=2 Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.144831 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.516434 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 16 14:43:13 crc kubenswrapper[4816]: E0216 14:43:13.517340 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28db99cc-bb32-4a18-a2a8-c8daafee6f8b" containerName="openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.517363 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="28db99cc-bb32-4a18-a2a8-c8daafee6f8b" containerName="openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.517557 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="28db99cc-bb32-4a18-a2a8-c8daafee6f8b" containerName="openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.518306 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.551954 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.557004 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/689b4789-2ea7-46da-930f-bf92141f0845-openstack-config-secret\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.557092 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-267ts\" (UniqueName: \"kubernetes.io/projected/689b4789-2ea7-46da-930f-bf92141f0845-kube-api-access-267ts\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.557335 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/689b4789-2ea7-46da-930f-bf92141f0845-openstack-config\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.591248 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.592678 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.597246 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-z5wpt" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.615915 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.667938 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-267ts\" (UniqueName: \"kubernetes.io/projected/689b4789-2ea7-46da-930f-bf92141f0845-kube-api-access-267ts\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.668056 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/689b4789-2ea7-46da-930f-bf92141f0845-openstack-config\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.668953 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/689b4789-2ea7-46da-930f-bf92141f0845-openstack-config-secret\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.677290 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/689b4789-2ea7-46da-930f-bf92141f0845-openstack-config\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc 
kubenswrapper[4816]: I0216 14:43:13.687257 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/689b4789-2ea7-46da-930f-bf92141f0845-openstack-config-secret\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.697486 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-267ts\" (UniqueName: \"kubernetes.io/projected/689b4789-2ea7-46da-930f-bf92141f0845-kube-api-access-267ts\") pod \"openstackclient\" (UID: \"689b4789-2ea7-46da-930f-bf92141f0845\") " pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.772174 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s4ln\" (UniqueName: \"kubernetes.io/projected/7e296da6-30fd-4fea-8dd5-c36394ca1c1c-kube-api-access-5s4ln\") pod \"kube-state-metrics-0\" (UID: \"7e296da6-30fd-4fea-8dd5-c36394ca1c1c\") " pod="openstack/kube-state-metrics-0" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.862130 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.874794 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s4ln\" (UniqueName: \"kubernetes.io/projected/7e296da6-30fd-4fea-8dd5-c36394ca1c1c-kube-api-access-5s4ln\") pod \"kube-state-metrics-0\" (UID: \"7e296da6-30fd-4fea-8dd5-c36394ca1c1c\") " pod="openstack/kube-state-metrics-0" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.910754 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s4ln\" (UniqueName: \"kubernetes.io/projected/7e296da6-30fd-4fea-8dd5-c36394ca1c1c-kube-api-access-5s4ln\") pod \"kube-state-metrics-0\" (UID: \"7e296da6-30fd-4fea-8dd5-c36394ca1c1c\") " pod="openstack/kube-state-metrics-0" Feb 16 14:43:13 crc kubenswrapper[4816]: I0216 14:43:13.921930 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.202150 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.204179 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.215804 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-8gz7q" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.216021 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.216067 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.228143 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.230041 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.234990 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.297729 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.297786 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.297844 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.297968 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.297990 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.298015 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jbg8\" (UniqueName: \"kubernetes.io/projected/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-kube-api-access-5jbg8\") pod \"alertmanager-metric-storage-0\" (UID: 
\"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.298039 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407001 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407061 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407095 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407178 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407197 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407218 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jbg8\" (UniqueName: \"kubernetes.io/projected/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-kube-api-access-5jbg8\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407240 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.407729 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: 
\"kubernetes.io/empty-dir/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.425275 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.427114 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.427866 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.434203 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.458827 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jbg8\" (UniqueName: \"kubernetes.io/projected/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-kube-api-access-5jbg8\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.463299 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/01d77692-9dbe-4e2c-8b09-f7ce7a86efec-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"01d77692-9dbe-4e2c-8b09-f7ce7a86efec\") " pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.558679 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.754990 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.758937 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.764243 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.764385 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.764449 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.764537 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.774388 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.774429 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-lzpm7" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.774602 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.780243 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.822202 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937543 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937602 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937631 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937659 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937794 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937850 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937891 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2cgs\" (UniqueName: \"kubernetes.io/projected/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-kube-api-access-q2cgs\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937927 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-config\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.937947 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:14 crc kubenswrapper[4816]: I0216 14:43:14.938178 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.016415 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047276 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047340 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047368 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: 
\"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047394 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047496 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047542 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047580 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2cgs\" (UniqueName: \"kubernetes.io/projected/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-kube-api-access-q2cgs\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047626 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-config\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047659 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.047753 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.051671 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 
14:43:15.052512 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.052927 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.060667 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.061110 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.061403 4816 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.061487 4816 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8e32ccdcae9a55b765488a477696bb221b112144b0765f63e8f004e8023cfb6b/globalmount\"" pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.064498 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-config\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.070267 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.077144 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.088433 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2cgs\" (UniqueName: \"kubernetes.io/projected/e5cfee9b-cc72-4751-8c27-39c3ab9c3c96-kube-api-access-q2cgs\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.161613 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fe5bc95f-30aa-4561-81f7-77ec6682470f\") pod \"prometheus-metric-storage-0\" (UID: \"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96\") " pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.237493 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 16 14:43:15 crc kubenswrapper[4816]: W0216 14:43:15.250975 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e296da6_30fd_4fea_8dd5_c36394ca1c1c.slice/crio-906954f6ddb8373ad93072c5ca3fbb36431fa4ae903a87c24732167799f9369c WatchSource:0}: Error finding container 906954f6ddb8373ad93072c5ca3fbb36431fa4ae903a87c24732167799f9369c: Status 404 returned error can't find the container with id 906954f6ddb8373ad93072c5ca3fbb36431fa4ae903a87c24732167799f9369c Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.396243 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.471327 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"689b4789-2ea7-46da-930f-bf92141f0845","Type":"ContainerStarted","Data":"4399fccd5d3546171e9d36b45e3ea128bcdc2a71f80e24483e5dc424a9c919f1"} Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.475490 4816 generic.go:334] "Generic (PLEG): container finished" podID="28db99cc-bb32-4a18-a2a8-c8daafee6f8b" containerID="23de134a2ee3fcf44c85a5ad41927b45621bb4b0672a9aea24be6b22ca8e14c4" exitCode=137 Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.488431 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7e296da6-30fd-4fea-8dd5-c36394ca1c1c","Type":"ContainerStarted","Data":"906954f6ddb8373ad93072c5ca3fbb36431fa4ae903a87c24732167799f9369c"} Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.561271 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.643030 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.703041 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config\") pod \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.703326 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config-secret\") pod \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.703436 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbk8m\" (UniqueName: \"kubernetes.io/projected/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-kube-api-access-bbk8m\") pod \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\" (UID: \"28db99cc-bb32-4a18-a2a8-c8daafee6f8b\") " Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.731141 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-kube-api-access-bbk8m" (OuterVolumeSpecName: "kube-api-access-bbk8m") pod "28db99cc-bb32-4a18-a2a8-c8daafee6f8b" (UID: "28db99cc-bb32-4a18-a2a8-c8daafee6f8b"). InnerVolumeSpecName "kube-api-access-bbk8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.734180 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "28db99cc-bb32-4a18-a2a8-c8daafee6f8b" (UID: "28db99cc-bb32-4a18-a2a8-c8daafee6f8b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.806958 4816 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.807030 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbk8m\" (UniqueName: \"kubernetes.io/projected/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-kube-api-access-bbk8m\") on node \"crc\" DevicePath \"\"" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.835640 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "28db99cc-bb32-4a18-a2a8-c8daafee6f8b" (UID: "28db99cc-bb32-4a18-a2a8-c8daafee6f8b"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:43:15 crc kubenswrapper[4816]: I0216 14:43:15.911033 4816 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/28db99cc-bb32-4a18-a2a8-c8daafee6f8b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.061413 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.505518 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7e296da6-30fd-4fea-8dd5-c36394ca1c1c","Type":"ContainerStarted","Data":"ec569f51dec07d4158c8b07e8ea6dbda681a39b45844eb99ea00ac550ab5fd3d"} Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.505829 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.508550 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"01d77692-9dbe-4e2c-8b09-f7ce7a86efec","Type":"ContainerStarted","Data":"b8b650b70b238947b80ff2cbe318321c7b46d17b70b90a7b09e05ddb73e8ccb2"} Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.510556 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"689b4789-2ea7-46da-930f-bf92141f0845","Type":"ContainerStarted","Data":"3599dabe909615ccef8eb0a80e6ff268fc7dc3b9978c0183643ab2b9a077585f"} Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.512441 4816 scope.go:117] "RemoveContainer" containerID="23de134a2ee3fcf44c85a5ad41927b45621bb4b0672a9aea24be6b22ca8e14c4" Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.512540 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.514479 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96","Type":"ContainerStarted","Data":"6f2b8840c7b7268e6031c99a81853d3cabc614a9d946e754495d208752eafefd"} Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.524941 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.873149369 podStartE2EDuration="3.524911861s" podCreationTimestamp="2026-02-16 14:43:13 +0000 UTC" firstStartedPulling="2026-02-16 14:43:15.285724753 +0000 UTC m=+5994.612438481" lastFinishedPulling="2026-02-16 14:43:15.937487245 +0000 UTC m=+5995.264200973" observedRunningTime="2026-02-16 14:43:16.524679355 +0000 UTC m=+5995.851393083" watchObservedRunningTime="2026-02-16 14:43:16.524911861 +0000 UTC m=+5995.851625589" Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.544107 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.544085684 podStartE2EDuration="3.544085684s" podCreationTimestamp="2026-02-16 14:43:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:43:16.541071442 +0000 UTC m=+5995.867785170" watchObservedRunningTime="2026-02-16 14:43:16.544085684 +0000 UTC m=+5995.870799422" Feb 16 14:43:16 crc kubenswrapper[4816]: I0216 14:43:16.546711 4816 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="28db99cc-bb32-4a18-a2a8-c8daafee6f8b" podUID="689b4789-2ea7-46da-930f-bf92141f0845" Feb 16 14:43:17 crc kubenswrapper[4816]: I0216 14:43:17.424405 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28db99cc-bb32-4a18-a2a8-c8daafee6f8b" path="/var/lib/kubelet/pods/28db99cc-bb32-4a18-a2a8-c8daafee6f8b/volumes" Feb 16 14:43:22 crc kubenswrapper[4816]: I0216 14:43:22.587316 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"01d77692-9dbe-4e2c-8b09-f7ce7a86efec","Type":"ContainerStarted","Data":"e0b89911dd689c69aea99dd808d5f59f29ba96ce222945960de24ebe1b597ad0"} Feb 16 14:43:23 crc kubenswrapper[4816]: I0216 14:43:23.599116 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96","Type":"ContainerStarted","Data":"0c5695e88573e331bf2610c86c8354b3fece629272aefabccbb05699169813fb"} Feb 16 14:43:23 crc kubenswrapper[4816]: I0216 14:43:23.926605 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 16 14:43:28 crc kubenswrapper[4816]: I0216 14:43:28.658487 4816 generic.go:334] "Generic (PLEG): container finished" podID="01d77692-9dbe-4e2c-8b09-f7ce7a86efec" containerID="e0b89911dd689c69aea99dd808d5f59f29ba96ce222945960de24ebe1b597ad0" exitCode=0 Feb 16 14:43:28 crc kubenswrapper[4816]: I0216 14:43:28.658564 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"01d77692-9dbe-4e2c-8b09-f7ce7a86efec","Type":"ContainerDied","Data":"e0b89911dd689c69aea99dd808d5f59f29ba96ce222945960de24ebe1b597ad0"} Feb 16 14:43:29 crc kubenswrapper[4816]: I0216 14:43:29.611749 4816 scope.go:117] "RemoveContainer" 
containerID="6a5d89bfe05b7c8e80ffa3fe765c5fe2f4746ca48c979a2e975a870d10945f64" Feb 16 14:43:29 crc kubenswrapper[4816]: I0216 14:43:29.640534 4816 scope.go:117] "RemoveContainer" containerID="1fb1923ff1f0b2a12075a07d9cd9bf7cec29ffabbb7beb655522b9d2b178fed2" Feb 16 14:43:29 crc kubenswrapper[4816]: I0216 14:43:29.689824 4816 generic.go:334] "Generic (PLEG): container finished" podID="e5cfee9b-cc72-4751-8c27-39c3ab9c3c96" containerID="0c5695e88573e331bf2610c86c8354b3fece629272aefabccbb05699169813fb" exitCode=0 Feb 16 14:43:29 crc kubenswrapper[4816]: I0216 14:43:29.689893 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96","Type":"ContainerDied","Data":"0c5695e88573e331bf2610c86c8354b3fece629272aefabccbb05699169813fb"} Feb 16 14:43:29 crc kubenswrapper[4816]: I0216 14:43:29.711422 4816 scope.go:117] "RemoveContainer" containerID="66b324fa5c294672742de09a17c09891c072d91238ef0970cd96af7a8861e62e" Feb 16 14:43:32 crc kubenswrapper[4816]: I0216 14:43:32.739172 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"01d77692-9dbe-4e2c-8b09-f7ce7a86efec","Type":"ContainerStarted","Data":"1431b30400063f2c45ae5b67d3310f6a4101312ad091334ea5eaa96d23b10ccd"} Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.787692 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96","Type":"ContainerStarted","Data":"3cfb86877510f5167392b58013975c3887387d35efe35dda463568984e68d1ab"} Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.790204 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"01d77692-9dbe-4e2c-8b09-f7ce7a86efec","Type":"ContainerStarted","Data":"ecd07c0b7836c652407708f031398d914f8b79591aaaf60021b9089862692520"} Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.790703 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.792927 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.828821 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=6.407672628 podStartE2EDuration="22.828796068s" podCreationTimestamp="2026-02-16 14:43:14 +0000 UTC" firstStartedPulling="2026-02-16 14:43:15.578877687 +0000 UTC m=+5994.905591415" lastFinishedPulling="2026-02-16 14:43:32.000001117 +0000 UTC m=+6011.326714855" observedRunningTime="2026-02-16 14:43:36.82151613 +0000 UTC m=+6016.148229898" watchObservedRunningTime="2026-02-16 14:43:36.828796068 +0000 UTC m=+6016.155509796" Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.940383 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.940456 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.940526 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.941143 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:43:36 crc kubenswrapper[4816]: I0216 14:43:36.941205 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" gracePeriod=600 Feb 16 14:43:37 crc kubenswrapper[4816]: E0216 14:43:37.012352 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb19d695_8c09_42cc_bc34_940019ab38dc.slice/crio-conmon-0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2.scope\": RecentStats: unable to find data in memory cache]" Feb 16 14:43:37 crc kubenswrapper[4816]: E0216 14:43:37.066327 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:43:37 crc kubenswrapper[4816]: I0216 14:43:37.808590 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" exitCode=0 Feb 16 14:43:37 crc kubenswrapper[4816]: I0216 14:43:37.808674 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2"} Feb 16 14:43:37 crc kubenswrapper[4816]: I0216 14:43:37.809058 4816 scope.go:117] "RemoveContainer" containerID="cd2cf9fe35bf79ffe24a1838af8ddd11554eb97681bc8bd3d107b775cf4e6572" Feb 16 14:43:37 crc kubenswrapper[4816]: I0216 14:43:37.809951 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:43:37 crc kubenswrapper[4816]: E0216 14:43:37.810216 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 
16 14:43:40 crc kubenswrapper[4816]: I0216 14:43:40.844371 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96","Type":"ContainerStarted","Data":"d89fd9da59c42a87df26276d88811c25dacb683300cdd4f00e2429e36738581c"} Feb 16 14:43:42 crc kubenswrapper[4816]: I0216 14:43:42.866072 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e5cfee9b-cc72-4751-8c27-39c3ab9c3c96","Type":"ContainerStarted","Data":"5f62809d2a240347acdbe2e940d02c36057c201ef3b6d5f4cee441f2c46825e2"} Feb 16 14:43:42 crc kubenswrapper[4816]: I0216 14:43:42.903993 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.312107889 podStartE2EDuration="29.903962705s" podCreationTimestamp="2026-02-16 14:43:13 +0000 UTC" firstStartedPulling="2026-02-16 14:43:16.074461589 +0000 UTC m=+5995.401175317" lastFinishedPulling="2026-02-16 14:43:42.666316405 +0000 UTC m=+6021.993030133" observedRunningTime="2026-02-16 14:43:42.892345349 +0000 UTC m=+6022.219059087" watchObservedRunningTime="2026-02-16 14:43:42.903962705 +0000 UTC m=+6022.230676433" Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.085633 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-lwjpq"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.106482 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-7401-account-create-update-r6tsd"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.115783 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-7tqgn"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.124336 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-zrq7l"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.132194 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-lwjpq"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.140058 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-3a52-account-create-update-7j59j"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.148556 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-7401-account-create-update-r6tsd"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.160224 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-zrq7l"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.168428 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-7tqgn"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.176376 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-3a52-account-create-update-7j59j"] Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.411887 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05901662-9692-4c04-a7df-c902018f3013" path="/var/lib/kubelet/pods/05901662-9692-4c04-a7df-c902018f3013/volumes" Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.412527 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b" path="/var/lib/kubelet/pods/0f6855f9-e3bc-4c9b-b860-57e2ac72ff4b/volumes" Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.413754 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="0fe07d97-0b42-4b0d-933a-fd9fa23711f2" path="/var/lib/kubelet/pods/0fe07d97-0b42-4b0d-933a-fd9fa23711f2/volumes" Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.414420 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4be63f67-1999-48c3-a4be-1ca62ea68c7d" path="/var/lib/kubelet/pods/4be63f67-1999-48c3-a4be-1ca62ea68c7d/volumes" Feb 16 14:43:43 crc kubenswrapper[4816]: I0216 14:43:43.416175 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9981546-326e-49bb-9f26-c2e9e3ce4482" path="/var/lib/kubelet/pods/b9981546-326e-49bb-9f26-c2e9e3ce4482/volumes" Feb 16 14:43:44 crc kubenswrapper[4816]: I0216 14:43:44.029633 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-73d7-account-create-update-2scwn"] Feb 16 14:43:44 crc kubenswrapper[4816]: I0216 14:43:44.040022 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-73d7-account-create-update-2scwn"] Feb 16 14:43:45 crc kubenswrapper[4816]: I0216 14:43:45.397190 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:45 crc kubenswrapper[4816]: I0216 14:43:45.397529 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:45 crc kubenswrapper[4816]: I0216 14:43:45.414442 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80db2d83-4fd4-4f79-adf2-82058b98081c" path="/var/lib/kubelet/pods/80db2d83-4fd4-4f79-adf2-82058b98081c/volumes" Feb 16 14:43:45 crc kubenswrapper[4816]: I0216 14:43:45.415170 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:45 crc kubenswrapper[4816]: I0216 14:43:45.893991 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.037439 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.040023 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.046598 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.047198 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.051897 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.208420 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-log-httpd\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.208494 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.208513 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-config-data\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.208618 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mspdp\" (UniqueName: \"kubernetes.io/projected/419310d7-65ac-4798-abcc-e7561cbe8ae7-kube-api-access-mspdp\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.208694 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-run-httpd\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.208937 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-scripts\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.209091 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.310451 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-scripts\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.310505 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.310540 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-log-httpd\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.310571 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.310594 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-config-data\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.310679 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mspdp\" (UniqueName: \"kubernetes.io/projected/419310d7-65ac-4798-abcc-e7561cbe8ae7-kube-api-access-mspdp\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.310724 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-run-httpd\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.311581 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-run-httpd\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.311719 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-log-httpd\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.319172 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.319279 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-scripts\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.319514 4816 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-config-data\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.325471 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.341094 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mspdp\" (UniqueName: \"kubernetes.io/projected/419310d7-65ac-4798-abcc-e7561cbe8ae7-kube-api-access-mspdp\") pod \"ceilometer-0\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") " pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.373498 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 16 14:43:47 crc kubenswrapper[4816]: I0216 14:43:47.952666 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 16 14:43:48 crc kubenswrapper[4816]: I0216 14:43:48.921232 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerStarted","Data":"1386843639fd2b9e8402e9702323d6d683191602f988941e531ee2cf29fb55d6"} Feb 16 14:43:48 crc kubenswrapper[4816]: I0216 14:43:48.921872 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerStarted","Data":"e742c3617d284e0be5c20f29e206cf5aef071b6feaf39667a066fa3436dc58ee"} Feb 16 14:43:49 crc kubenswrapper[4816]: I0216 14:43:49.399354 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:43:49 crc kubenswrapper[4816]: E0216 14:43:49.399620 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:43:49 crc kubenswrapper[4816]: I0216 14:43:49.933184 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerStarted","Data":"c100893e3aae34edc214ccf8a9793fd5bd2c803ee0e457296b9f1a4032af0539"} Feb 16 14:43:50 crc kubenswrapper[4816]: I0216 14:43:50.945408 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerStarted","Data":"2227a8de0db522f0a4da1180476f5f06a8f18153a3549ada2683ec9e13331abe"} Feb 16 14:43:52 crc kubenswrapper[4816]: I0216 14:43:52.966820 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerStarted","Data":"db30d225aa918e044d0c722cd502ed3b2d477460d1191aa714090ff8478ee46c"} Feb 16 14:43:52 crc kubenswrapper[4816]: I0216 14:43:52.967404 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ceilometer-0" Feb 16 14:43:53 crc kubenswrapper[4816]: I0216 14:43:53.002268 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.217609214 podStartE2EDuration="6.002242197s" podCreationTimestamp="2026-02-16 14:43:47 +0000 UTC" firstStartedPulling="2026-02-16 14:43:47.955894102 +0000 UTC m=+6027.282607830" lastFinishedPulling="2026-02-16 14:43:51.740527085 +0000 UTC m=+6031.067240813" observedRunningTime="2026-02-16 14:43:52.988924094 +0000 UTC m=+6032.315637822" watchObservedRunningTime="2026-02-16 14:43:53.002242197 +0000 UTC m=+6032.328955925" Feb 16 14:43:54 crc kubenswrapper[4816]: I0216 14:43:54.050346 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sgf4b"] Feb 16 14:43:54 crc kubenswrapper[4816]: I0216 14:43:54.065808 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-sgf4b"] Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.048296 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pvf8t"] Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.051219 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.143836 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvf8t"] Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.184680 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx77z\" (UniqueName: \"kubernetes.io/projected/52afb6b3-043c-409d-a12b-30cbed642e07-kube-api-access-vx77z\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.184752 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-utilities\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.184782 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-catalog-content\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.286522 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-utilities\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.286594 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-catalog-content\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 
14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.286881 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx77z\" (UniqueName: \"kubernetes.io/projected/52afb6b3-043c-409d-a12b-30cbed642e07-kube-api-access-vx77z\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.287478 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-catalog-content\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.287819 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-utilities\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.329071 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx77z\" (UniqueName: \"kubernetes.io/projected/52afb6b3-043c-409d-a12b-30cbed642e07-kube-api-access-vx77z\") pod \"community-operators-pvf8t\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.415382 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f52dc84a-938b-4013-b9d1-fbcdb6360261" path="/var/lib/kubelet/pods/f52dc84a-938b-4013-b9d1-fbcdb6360261/volumes" Feb 16 14:43:55 crc kubenswrapper[4816]: I0216 14:43:55.469868 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:43:56 crc kubenswrapper[4816]: I0216 14:43:56.219893 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvf8t"] Feb 16 14:43:56 crc kubenswrapper[4816]: W0216 14:43:56.221505 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52afb6b3_043c_409d_a12b_30cbed642e07.slice/crio-4cd834f39a85de47e3776d319df6f6a5469a0d0fdcee6e08c256aa41ed0b47fb WatchSource:0}: Error finding container 4cd834f39a85de47e3776d319df6f6a5469a0d0fdcee6e08c256aa41ed0b47fb: Status 404 returned error can't find the container with id 4cd834f39a85de47e3776d319df6f6a5469a0d0fdcee6e08c256aa41ed0b47fb Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.008438 4816 generic.go:334] "Generic (PLEG): container finished" podID="52afb6b3-043c-409d-a12b-30cbed642e07" containerID="44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98" exitCode=0 Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.009013 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvf8t" event={"ID":"52afb6b3-043c-409d-a12b-30cbed642e07","Type":"ContainerDied","Data":"44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98"} Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.009133 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvf8t" event={"ID":"52afb6b3-043c-409d-a12b-30cbed642e07","Type":"ContainerStarted","Data":"4cd834f39a85de47e3776d319df6f6a5469a0d0fdcee6e08c256aa41ed0b47fb"} Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.455253 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nb9tc"] Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.457731 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.466287 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nb9tc"] Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.611936 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-utilities\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.612067 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-catalog-content\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.612178 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clwm5\" (UniqueName: \"kubernetes.io/projected/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-kube-api-access-clwm5\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.719470 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clwm5\" (UniqueName: \"kubernetes.io/projected/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-kube-api-access-clwm5\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.719584 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-utilities\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.719722 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-catalog-content\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.720280 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-catalog-content\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.720359 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-utilities\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.742859 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-clwm5\" (UniqueName: \"kubernetes.io/projected/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-kube-api-access-clwm5\") pod \"certified-operators-nb9tc\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") " pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:57 crc kubenswrapper[4816]: I0216 14:43:57.788137 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.032762 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvf8t" event={"ID":"52afb6b3-043c-409d-a12b-30cbed642e07","Type":"ContainerStarted","Data":"46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6"} Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.451468 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nb9tc"] Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.532890 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-w6ndj"] Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.536455 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.552571 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-w6ndj"] Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.601231 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe90291c-c1f0-4dec-b1a9-4ab410230979-operator-scripts\") pod \"aodh-db-create-w6ndj\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.601413 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5r88\" (UniqueName: \"kubernetes.io/projected/fe90291c-c1f0-4dec-b1a9-4ab410230979-kube-api-access-m5r88\") pod \"aodh-db-create-w6ndj\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.626932 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-20a6-account-create-update-w6mcg"] Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.628406 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.631572 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.638622 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-20a6-account-create-update-w6mcg"] Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.729891 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhhk5\" (UniqueName: \"kubernetes.io/projected/979a1247-b91e-4f6d-9046-c772f11fb1ec-kube-api-access-nhhk5\") pod \"aodh-20a6-account-create-update-w6mcg\" (UID: \"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.730057 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe90291c-c1f0-4dec-b1a9-4ab410230979-operator-scripts\") pod \"aodh-db-create-w6ndj\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.730246 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5r88\" (UniqueName: \"kubernetes.io/projected/fe90291c-c1f0-4dec-b1a9-4ab410230979-kube-api-access-m5r88\") pod \"aodh-db-create-w6ndj\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.730333 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979a1247-b91e-4f6d-9046-c772f11fb1ec-operator-scripts\") pod \"aodh-20a6-account-create-update-w6mcg\" (UID: \"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.732474 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe90291c-c1f0-4dec-b1a9-4ab410230979-operator-scripts\") pod \"aodh-db-create-w6ndj\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.761827 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5r88\" (UniqueName: \"kubernetes.io/projected/fe90291c-c1f0-4dec-b1a9-4ab410230979-kube-api-access-m5r88\") pod \"aodh-db-create-w6ndj\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.832315 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhhk5\" (UniqueName: \"kubernetes.io/projected/979a1247-b91e-4f6d-9046-c772f11fb1ec-kube-api-access-nhhk5\") pod \"aodh-20a6-account-create-update-w6mcg\" (UID: \"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.832579 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979a1247-b91e-4f6d-9046-c772f11fb1ec-operator-scripts\") pod \"aodh-20a6-account-create-update-w6mcg\" (UID: 
\"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.833633 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979a1247-b91e-4f6d-9046-c772f11fb1ec-operator-scripts\") pod \"aodh-20a6-account-create-update-w6mcg\" (UID: \"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.914804 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhhk5\" (UniqueName: \"kubernetes.io/projected/979a1247-b91e-4f6d-9046-c772f11fb1ec-kube-api-access-nhhk5\") pod \"aodh-20a6-account-create-update-w6mcg\" (UID: \"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.919391 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-w6ndj" Feb 16 14:43:58 crc kubenswrapper[4816]: I0216 14:43:58.958224 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:43:59 crc kubenswrapper[4816]: I0216 14:43:59.056021 4816 generic.go:334] "Generic (PLEG): container finished" podID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerID="db9b0e43ab9a32dbb14f53347ceaa7c3ad6b14a41d2e9b1fe3dc5d63d6d3a40c" exitCode=0 Feb 16 14:43:59 crc kubenswrapper[4816]: I0216 14:43:59.056197 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nb9tc" event={"ID":"6fd7e6cd-5936-4e4b-a11e-42b171c5b367","Type":"ContainerDied","Data":"db9b0e43ab9a32dbb14f53347ceaa7c3ad6b14a41d2e9b1fe3dc5d63d6d3a40c"} Feb 16 14:43:59 crc kubenswrapper[4816]: I0216 14:43:59.056239 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nb9tc" event={"ID":"6fd7e6cd-5936-4e4b-a11e-42b171c5b367","Type":"ContainerStarted","Data":"299eb09c7c2e0486e9c2c13c5793000ba56eeeb002966e6c531564643e0c1145"} Feb 16 14:43:59 crc kubenswrapper[4816]: I0216 14:43:59.600165 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-w6ndj"] Feb 16 14:43:59 crc kubenswrapper[4816]: W0216 14:43:59.602326 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe90291c_c1f0_4dec_b1a9_4ab410230979.slice/crio-3dae9396856711feef7fbb69a136fda8e8d0aec77739c54cabd5883e666117e7 WatchSource:0}: Error finding container 3dae9396856711feef7fbb69a136fda8e8d0aec77739c54cabd5883e666117e7: Status 404 returned error can't find the container with id 3dae9396856711feef7fbb69a136fda8e8d0aec77739c54cabd5883e666117e7 Feb 16 14:43:59 crc kubenswrapper[4816]: I0216 14:43:59.741064 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-20a6-account-create-update-w6mcg"] Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.066265 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-w6ndj" event={"ID":"fe90291c-c1f0-4dec-b1a9-4ab410230979","Type":"ContainerStarted","Data":"8c92918eb2ed9c27fe25adf206d6b97fb8bb34de57375601747ea605b6c694a2"} Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.066598 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-w6ndj" 
event={"ID":"fe90291c-c1f0-4dec-b1a9-4ab410230979","Type":"ContainerStarted","Data":"3dae9396856711feef7fbb69a136fda8e8d0aec77739c54cabd5883e666117e7"} Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.068498 4816 generic.go:334] "Generic (PLEG): container finished" podID="52afb6b3-043c-409d-a12b-30cbed642e07" containerID="46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6" exitCode=0 Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.068590 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvf8t" event={"ID":"52afb6b3-043c-409d-a12b-30cbed642e07","Type":"ContainerDied","Data":"46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6"} Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.074013 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-20a6-account-create-update-w6mcg" event={"ID":"979a1247-b91e-4f6d-9046-c772f11fb1ec","Type":"ContainerStarted","Data":"3cbf90d198e46988a3d799b6f31a0bdd69680c50178712411b495376ed179ff4"} Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.074060 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-20a6-account-create-update-w6mcg" event={"ID":"979a1247-b91e-4f6d-9046-c772f11fb1ec","Type":"ContainerStarted","Data":"63154a91daf91234c299c1fece66c04d728cb4014da885b2930eb5de91711cad"} Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.086920 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-create-w6ndj" podStartSLOduration=2.086898239 podStartE2EDuration="2.086898239s" podCreationTimestamp="2026-02-16 14:43:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:44:00.080738821 +0000 UTC m=+6039.407452539" watchObservedRunningTime="2026-02-16 14:44:00.086898239 +0000 UTC m=+6039.413611967" Feb 16 14:44:00 crc kubenswrapper[4816]: I0216 14:44:00.114233 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-20a6-account-create-update-w6mcg" podStartSLOduration=2.114209713 podStartE2EDuration="2.114209713s" podCreationTimestamp="2026-02-16 14:43:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:44:00.113210596 +0000 UTC m=+6039.439924324" watchObservedRunningTime="2026-02-16 14:44:00.114209713 +0000 UTC m=+6039.440923431" Feb 16 14:44:01 crc kubenswrapper[4816]: I0216 14:44:01.084071 4816 generic.go:334] "Generic (PLEG): container finished" podID="fe90291c-c1f0-4dec-b1a9-4ab410230979" containerID="8c92918eb2ed9c27fe25adf206d6b97fb8bb34de57375601747ea605b6c694a2" exitCode=0 Feb 16 14:44:01 crc kubenswrapper[4816]: I0216 14:44:01.084174 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-w6ndj" event={"ID":"fe90291c-c1f0-4dec-b1a9-4ab410230979","Type":"ContainerDied","Data":"8c92918eb2ed9c27fe25adf206d6b97fb8bb34de57375601747ea605b6c694a2"} Feb 16 14:44:01 crc kubenswrapper[4816]: I0216 14:44:01.088671 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nb9tc" event={"ID":"6fd7e6cd-5936-4e4b-a11e-42b171c5b367","Type":"ContainerStarted","Data":"6ef11496c831ea23889f3b717f71459b0681a4b447772c03eec5e0df98ba7569"} Feb 16 14:44:01 crc kubenswrapper[4816]: I0216 14:44:01.090582 4816 generic.go:334] "Generic (PLEG): container finished" 
podID="979a1247-b91e-4f6d-9046-c772f11fb1ec" containerID="3cbf90d198e46988a3d799b6f31a0bdd69680c50178712411b495376ed179ff4" exitCode=0 Feb 16 14:44:01 crc kubenswrapper[4816]: I0216 14:44:01.090612 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-20a6-account-create-update-w6mcg" event={"ID":"979a1247-b91e-4f6d-9046-c772f11fb1ec","Type":"ContainerDied","Data":"3cbf90d198e46988a3d799b6f31a0bdd69680c50178712411b495376ed179ff4"} Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.103341 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvf8t" event={"ID":"52afb6b3-043c-409d-a12b-30cbed642e07","Type":"ContainerStarted","Data":"05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364"} Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.131119 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pvf8t" podStartSLOduration=2.67332593 podStartE2EDuration="7.131094006s" podCreationTimestamp="2026-02-16 14:43:55 +0000 UTC" firstStartedPulling="2026-02-16 14:43:57.011431843 +0000 UTC m=+6036.338145571" lastFinishedPulling="2026-02-16 14:44:01.469199919 +0000 UTC m=+6040.795913647" observedRunningTime="2026-02-16 14:44:02.127329774 +0000 UTC m=+6041.454043502" watchObservedRunningTime="2026-02-16 14:44:02.131094006 +0000 UTC m=+6041.457807744" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.399229 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:44:02 crc kubenswrapper[4816]: E0216 14:44:02.399927 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.644266 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-w6ndj" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.649330 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.778285 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe90291c-c1f0-4dec-b1a9-4ab410230979-operator-scripts\") pod \"fe90291c-c1f0-4dec-b1a9-4ab410230979\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.778356 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhhk5\" (UniqueName: \"kubernetes.io/projected/979a1247-b91e-4f6d-9046-c772f11fb1ec-kube-api-access-nhhk5\") pod \"979a1247-b91e-4f6d-9046-c772f11fb1ec\" (UID: \"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.778536 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5r88\" (UniqueName: \"kubernetes.io/projected/fe90291c-c1f0-4dec-b1a9-4ab410230979-kube-api-access-m5r88\") pod \"fe90291c-c1f0-4dec-b1a9-4ab410230979\" (UID: \"fe90291c-c1f0-4dec-b1a9-4ab410230979\") " Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.778621 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979a1247-b91e-4f6d-9046-c772f11fb1ec-operator-scripts\") pod \"979a1247-b91e-4f6d-9046-c772f11fb1ec\" (UID: \"979a1247-b91e-4f6d-9046-c772f11fb1ec\") " Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.779140 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe90291c-c1f0-4dec-b1a9-4ab410230979-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fe90291c-c1f0-4dec-b1a9-4ab410230979" (UID: "fe90291c-c1f0-4dec-b1a9-4ab410230979"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.779201 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/979a1247-b91e-4f6d-9046-c772f11fb1ec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "979a1247-b91e-4f6d-9046-c772f11fb1ec" (UID: "979a1247-b91e-4f6d-9046-c772f11fb1ec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.784102 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe90291c-c1f0-4dec-b1a9-4ab410230979-kube-api-access-m5r88" (OuterVolumeSpecName: "kube-api-access-m5r88") pod "fe90291c-c1f0-4dec-b1a9-4ab410230979" (UID: "fe90291c-c1f0-4dec-b1a9-4ab410230979"). InnerVolumeSpecName "kube-api-access-m5r88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.784505 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/979a1247-b91e-4f6d-9046-c772f11fb1ec-kube-api-access-nhhk5" (OuterVolumeSpecName: "kube-api-access-nhhk5") pod "979a1247-b91e-4f6d-9046-c772f11fb1ec" (UID: "979a1247-b91e-4f6d-9046-c772f11fb1ec"). InnerVolumeSpecName "kube-api-access-nhhk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.880787 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe90291c-c1f0-4dec-b1a9-4ab410230979-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.881089 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhhk5\" (UniqueName: \"kubernetes.io/projected/979a1247-b91e-4f6d-9046-c772f11fb1ec-kube-api-access-nhhk5\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.881100 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5r88\" (UniqueName: \"kubernetes.io/projected/fe90291c-c1f0-4dec-b1a9-4ab410230979-kube-api-access-m5r88\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:02 crc kubenswrapper[4816]: I0216 14:44:02.881111 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979a1247-b91e-4f6d-9046-c772f11fb1ec-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:03 crc kubenswrapper[4816]: I0216 14:44:03.117885 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-20a6-account-create-update-w6mcg" Feb 16 14:44:03 crc kubenswrapper[4816]: I0216 14:44:03.119329 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-20a6-account-create-update-w6mcg" event={"ID":"979a1247-b91e-4f6d-9046-c772f11fb1ec","Type":"ContainerDied","Data":"63154a91daf91234c299c1fece66c04d728cb4014da885b2930eb5de91711cad"} Feb 16 14:44:03 crc kubenswrapper[4816]: I0216 14:44:03.119370 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63154a91daf91234c299c1fece66c04d728cb4014da885b2930eb5de91711cad" Feb 16 14:44:03 crc kubenswrapper[4816]: I0216 14:44:03.120889 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-w6ndj" event={"ID":"fe90291c-c1f0-4dec-b1a9-4ab410230979","Type":"ContainerDied","Data":"3dae9396856711feef7fbb69a136fda8e8d0aec77739c54cabd5883e666117e7"} Feb 16 14:44:03 crc kubenswrapper[4816]: I0216 14:44:03.120912 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3dae9396856711feef7fbb69a136fda8e8d0aec77739c54cabd5883e666117e7" Feb 16 14:44:03 crc kubenswrapper[4816]: I0216 14:44:03.120954 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-w6ndj" Feb 16 14:44:05 crc kubenswrapper[4816]: I0216 14:44:05.151062 4816 generic.go:334] "Generic (PLEG): container finished" podID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerID="6ef11496c831ea23889f3b717f71459b0681a4b447772c03eec5e0df98ba7569" exitCode=0 Feb 16 14:44:05 crc kubenswrapper[4816]: I0216 14:44:05.151298 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nb9tc" event={"ID":"6fd7e6cd-5936-4e4b-a11e-42b171c5b367","Type":"ContainerDied","Data":"6ef11496c831ea23889f3b717f71459b0681a4b447772c03eec5e0df98ba7569"} Feb 16 14:44:05 crc kubenswrapper[4816]: I0216 14:44:05.470404 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:44:05 crc kubenswrapper[4816]: I0216 14:44:05.470761 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:44:06 crc kubenswrapper[4816]: I0216 14:44:06.163609 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nb9tc" event={"ID":"6fd7e6cd-5936-4e4b-a11e-42b171c5b367","Type":"ContainerStarted","Data":"b45fd7aaa556bb9ff1015ec059f64c4469d640f67063c66fc0e2d2907305efea"} Feb 16 14:44:06 crc kubenswrapper[4816]: I0216 14:44:06.190373 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nb9tc" podStartSLOduration=2.428463094 podStartE2EDuration="9.190351897s" podCreationTimestamp="2026-02-16 14:43:57 +0000 UTC" firstStartedPulling="2026-02-16 14:43:59.060341498 +0000 UTC m=+6038.387055216" lastFinishedPulling="2026-02-16 14:44:05.822230291 +0000 UTC m=+6045.148944019" observedRunningTime="2026-02-16 14:44:06.181555478 +0000 UTC m=+6045.508269236" watchObservedRunningTime="2026-02-16 14:44:06.190351897 +0000 UTC m=+6045.517065615" Feb 16 14:44:06 crc kubenswrapper[4816]: I0216 14:44:06.520933 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-pvf8t" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="registry-server" probeResult="failure" output=< Feb 16 14:44:06 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 14:44:06 crc kubenswrapper[4816]: > Feb 16 14:44:07 crc kubenswrapper[4816]: I0216 14:44:07.788248 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:44:07 crc kubenswrapper[4816]: I0216 14:44:07.788496 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:44:07 crc kubenswrapper[4816]: I0216 14:44:07.839439 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.016135 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-8t7k2"] Feb 16 14:44:09 crc kubenswrapper[4816]: E0216 14:44:09.017028 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="979a1247-b91e-4f6d-9046-c772f11fb1ec" containerName="mariadb-account-create-update" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.017065 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="979a1247-b91e-4f6d-9046-c772f11fb1ec" containerName="mariadb-account-create-update" Feb 16 
14:44:09 crc kubenswrapper[4816]: E0216 14:44:09.017083 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe90291c-c1f0-4dec-b1a9-4ab410230979" containerName="mariadb-database-create" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.017089 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe90291c-c1f0-4dec-b1a9-4ab410230979" containerName="mariadb-database-create" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.017288 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="979a1247-b91e-4f6d-9046-c772f11fb1ec" containerName="mariadb-account-create-update" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.017315 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe90291c-c1f0-4dec-b1a9-4ab410230979" containerName="mariadb-database-create" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.018173 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.020594 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.020683 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-x2wt4" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.020849 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.020851 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.027590 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-8t7k2"] Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.131061 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-config-data\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.131103 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-scripts\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.131147 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-combined-ca-bundle\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.131170 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhbz8\" (UniqueName: \"kubernetes.io/projected/c58e5974-3461-4e24-9576-60f97be5fcb3-kube-api-access-xhbz8\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.232976 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-config-data\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.233024 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-scripts\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.233065 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-combined-ca-bundle\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.233087 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhbz8\" (UniqueName: \"kubernetes.io/projected/c58e5974-3461-4e24-9576-60f97be5fcb3-kube-api-access-xhbz8\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.243341 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-scripts\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.244125 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-combined-ca-bundle\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.244499 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-config-data\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.253467 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhbz8\" (UniqueName: \"kubernetes.io/projected/c58e5974-3461-4e24-9576-60f97be5fcb3-kube-api-access-xhbz8\") pod \"aodh-db-sync-8t7k2\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.394924 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:09 crc kubenswrapper[4816]: I0216 14:44:09.958253 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-8t7k2"] Feb 16 14:44:09 crc kubenswrapper[4816]: W0216 14:44:09.969844 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc58e5974_3461_4e24_9576_60f97be5fcb3.slice/crio-2824c2690e571aac436967c82a58348360c11c7af914ae3aeaadcd6cfb96b15a WatchSource:0}: Error finding container 2824c2690e571aac436967c82a58348360c11c7af914ae3aeaadcd6cfb96b15a: Status 404 returned error can't find the container with id 2824c2690e571aac436967c82a58348360c11c7af914ae3aeaadcd6cfb96b15a Feb 16 14:44:10 crc kubenswrapper[4816]: I0216 14:44:10.204890 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8t7k2" event={"ID":"c58e5974-3461-4e24-9576-60f97be5fcb3","Type":"ContainerStarted","Data":"2824c2690e571aac436967c82a58348360c11c7af914ae3aeaadcd6cfb96b15a"} Feb 16 14:44:13 crc kubenswrapper[4816]: I0216 14:44:13.057074 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-tqhhr"] Feb 16 14:44:13 crc kubenswrapper[4816]: I0216 14:44:13.074389 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8cdtd"] Feb 16 14:44:13 crc kubenswrapper[4816]: I0216 14:44:13.099040 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8cdtd"] Feb 16 14:44:13 crc kubenswrapper[4816]: I0216 14:44:13.111301 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-tqhhr"] Feb 16 14:44:13 crc kubenswrapper[4816]: I0216 14:44:13.525530 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2158e136-e15e-4388-aaba-4a3a6d936dbe" path="/var/lib/kubelet/pods/2158e136-e15e-4388-aaba-4a3a6d936dbe/volumes" Feb 16 14:44:13 crc kubenswrapper[4816]: I0216 14:44:13.526599 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eddabf2-6f16-4466-a536-79daaa13fef8" path="/var/lib/kubelet/pods/7eddabf2-6f16-4466-a536-79daaa13fef8/volumes" Feb 16 14:44:15 crc kubenswrapper[4816]: I0216 14:44:15.253303 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8t7k2" event={"ID":"c58e5974-3461-4e24-9576-60f97be5fcb3","Type":"ContainerStarted","Data":"e2fb61a6c8f2ce7de57b093d558b02c6391fa10e125688bd79bed0f51155f8db"} Feb 16 14:44:15 crc kubenswrapper[4816]: I0216 14:44:15.275073 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-8t7k2" podStartSLOduration=2.40556126 podStartE2EDuration="7.275055203s" podCreationTimestamp="2026-02-16 14:44:08 +0000 UTC" firstStartedPulling="2026-02-16 14:44:09.975905516 +0000 UTC m=+6049.302619254" lastFinishedPulling="2026-02-16 14:44:14.845399469 +0000 UTC m=+6054.172113197" observedRunningTime="2026-02-16 14:44:15.268416483 +0000 UTC m=+6054.595130221" watchObservedRunningTime="2026-02-16 14:44:15.275055203 +0000 UTC m=+6054.601768931" Feb 16 14:44:15 crc kubenswrapper[4816]: I0216 14:44:15.399285 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:44:15 crc kubenswrapper[4816]: E0216 14:44:15.400241 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:44:15 crc kubenswrapper[4816]: I0216 14:44:15.533447 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:44:15 crc kubenswrapper[4816]: I0216 14:44:15.584813 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:44:15 crc kubenswrapper[4816]: I0216 14:44:15.770954 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvf8t"] Feb 16 14:44:17 crc kubenswrapper[4816]: I0216 14:44:17.274875 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pvf8t" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="registry-server" containerID="cri-o://05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364" gracePeriod=2 Feb 16 14:44:17 crc kubenswrapper[4816]: I0216 14:44:17.388813 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 16 14:44:17 crc kubenswrapper[4816]: I0216 14:44:17.808680 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:44:17 crc kubenswrapper[4816]: I0216 14:44:17.841456 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nb9tc" Feb 16 14:44:17 crc kubenswrapper[4816]: I0216 14:44:17.992278 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-catalog-content\") pod \"52afb6b3-043c-409d-a12b-30cbed642e07\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " Feb 16 14:44:17 crc kubenswrapper[4816]: I0216 14:44:17.999105 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx77z\" (UniqueName: \"kubernetes.io/projected/52afb6b3-043c-409d-a12b-30cbed642e07-kube-api-access-vx77z\") pod \"52afb6b3-043c-409d-a12b-30cbed642e07\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " Feb 16 14:44:17 crc kubenswrapper[4816]: I0216 14:44:17.999165 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-utilities\") pod \"52afb6b3-043c-409d-a12b-30cbed642e07\" (UID: \"52afb6b3-043c-409d-a12b-30cbed642e07\") " Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:17.999788 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-utilities" (OuterVolumeSpecName: "utilities") pod "52afb6b3-043c-409d-a12b-30cbed642e07" (UID: "52afb6b3-043c-409d-a12b-30cbed642e07"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.011108 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52afb6b3-043c-409d-a12b-30cbed642e07-kube-api-access-vx77z" (OuterVolumeSpecName: "kube-api-access-vx77z") pod "52afb6b3-043c-409d-a12b-30cbed642e07" (UID: "52afb6b3-043c-409d-a12b-30cbed642e07"). InnerVolumeSpecName "kube-api-access-vx77z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.040573 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52afb6b3-043c-409d-a12b-30cbed642e07" (UID: "52afb6b3-043c-409d-a12b-30cbed642e07"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.102057 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.102422 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx77z\" (UniqueName: \"kubernetes.io/projected/52afb6b3-043c-409d-a12b-30cbed642e07-kube-api-access-vx77z\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.102551 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52afb6b3-043c-409d-a12b-30cbed642e07-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.293026 4816 generic.go:334] "Generic (PLEG): container finished" podID="c58e5974-3461-4e24-9576-60f97be5fcb3" containerID="e2fb61a6c8f2ce7de57b093d558b02c6391fa10e125688bd79bed0f51155f8db" exitCode=0 Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.293111 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8t7k2" event={"ID":"c58e5974-3461-4e24-9576-60f97be5fcb3","Type":"ContainerDied","Data":"e2fb61a6c8f2ce7de57b093d558b02c6391fa10e125688bd79bed0f51155f8db"} Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.299440 4816 generic.go:334] "Generic (PLEG): container finished" podID="52afb6b3-043c-409d-a12b-30cbed642e07" containerID="05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364" exitCode=0 Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.299495 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvf8t" event={"ID":"52afb6b3-043c-409d-a12b-30cbed642e07","Type":"ContainerDied","Data":"05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364"} Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.299511 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pvf8t" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.299526 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvf8t" event={"ID":"52afb6b3-043c-409d-a12b-30cbed642e07","Type":"ContainerDied","Data":"4cd834f39a85de47e3776d319df6f6a5469a0d0fdcee6e08c256aa41ed0b47fb"} Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.299550 4816 scope.go:117] "RemoveContainer" containerID="05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.337234 4816 scope.go:117] "RemoveContainer" containerID="46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.341702 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvf8t"] Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.351558 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pvf8t"] Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.357945 4816 scope.go:117] "RemoveContainer" containerID="44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.400747 4816 scope.go:117] "RemoveContainer" containerID="05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364" Feb 16 14:44:18 crc kubenswrapper[4816]: E0216 14:44:18.401276 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364\": container with ID starting with 05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364 not found: ID does not exist" containerID="05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.401306 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364"} err="failed to get container status \"05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364\": rpc error: code = NotFound desc = could not find container \"05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364\": container with ID starting with 05f13f13a1772dc72ca66a69f903ed82be56516dbb1f60b573ea748de77d1364 not found: ID does not exist" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.401325 4816 scope.go:117] "RemoveContainer" containerID="46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6" Feb 16 14:44:18 crc kubenswrapper[4816]: E0216 14:44:18.402056 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6\": container with ID starting with 46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6 not found: ID does not exist" containerID="46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.402136 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6"} err="failed to get container status \"46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6\": rpc error: code = NotFound desc = could not find 
container \"46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6\": container with ID starting with 46ed627066deb404519bca226c253c2255a8b59c4f5c551773e9c11c85a797a6 not found: ID does not exist" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.402164 4816 scope.go:117] "RemoveContainer" containerID="44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98" Feb 16 14:44:18 crc kubenswrapper[4816]: E0216 14:44:18.402421 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98\": container with ID starting with 44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98 not found: ID does not exist" containerID="44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98" Feb 16 14:44:18 crc kubenswrapper[4816]: I0216 14:44:18.402447 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98"} err="failed to get container status \"44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98\": rpc error: code = NotFound desc = could not find container \"44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98\": container with ID starting with 44c9d3219cbd49427ed6d7506b1220f9d68aa1f918bb6ce05e58635ee6d55b98 not found: ID does not exist" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.420465 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" path="/var/lib/kubelet/pods/52afb6b3-043c-409d-a12b-30cbed642e07/volumes" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.697918 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.867264 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhbz8\" (UniqueName: \"kubernetes.io/projected/c58e5974-3461-4e24-9576-60f97be5fcb3-kube-api-access-xhbz8\") pod \"c58e5974-3461-4e24-9576-60f97be5fcb3\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.868137 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-scripts\") pod \"c58e5974-3461-4e24-9576-60f97be5fcb3\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.868193 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-combined-ca-bundle\") pod \"c58e5974-3461-4e24-9576-60f97be5fcb3\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.868420 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-config-data\") pod \"c58e5974-3461-4e24-9576-60f97be5fcb3\" (UID: \"c58e5974-3461-4e24-9576-60f97be5fcb3\") " Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.873372 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c58e5974-3461-4e24-9576-60f97be5fcb3-kube-api-access-xhbz8" (OuterVolumeSpecName: "kube-api-access-xhbz8") pod "c58e5974-3461-4e24-9576-60f97be5fcb3" (UID: "c58e5974-3461-4e24-9576-60f97be5fcb3"). InnerVolumeSpecName "kube-api-access-xhbz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.873947 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-scripts" (OuterVolumeSpecName: "scripts") pod "c58e5974-3461-4e24-9576-60f97be5fcb3" (UID: "c58e5974-3461-4e24-9576-60f97be5fcb3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.895372 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c58e5974-3461-4e24-9576-60f97be5fcb3" (UID: "c58e5974-3461-4e24-9576-60f97be5fcb3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.923436 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-config-data" (OuterVolumeSpecName: "config-data") pod "c58e5974-3461-4e24-9576-60f97be5fcb3" (UID: "c58e5974-3461-4e24-9576-60f97be5fcb3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.971268 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhbz8\" (UniqueName: \"kubernetes.io/projected/c58e5974-3461-4e24-9576-60f97be5fcb3-kube-api-access-xhbz8\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.971306 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.971319 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.971327 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58e5974-3461-4e24-9576-60f97be5fcb3-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.977841 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nb9tc"] Feb 16 14:44:19 crc kubenswrapper[4816]: I0216 14:44:19.978069 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nb9tc" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="registry-server" containerID="cri-o://b45fd7aaa556bb9ff1015ec059f64c4469d640f67063c66fc0e2d2907305efea" gracePeriod=2 Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.342788 4816 generic.go:334] "Generic (PLEG): container finished" podID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerID="b45fd7aaa556bb9ff1015ec059f64c4469d640f67063c66fc0e2d2907305efea" exitCode=0 Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.342861 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nb9tc" event={"ID":"6fd7e6cd-5936-4e4b-a11e-42b171c5b367","Type":"ContainerDied","Data":"b45fd7aaa556bb9ff1015ec059f64c4469d640f67063c66fc0e2d2907305efea"} Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.344474 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8t7k2" event={"ID":"c58e5974-3461-4e24-9576-60f97be5fcb3","Type":"ContainerDied","Data":"2824c2690e571aac436967c82a58348360c11c7af914ae3aeaadcd6cfb96b15a"} Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.344498 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2824c2690e571aac436967c82a58348360c11c7af914ae3aeaadcd6cfb96b15a" Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.344627 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-8t7k2" Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.497851 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.683705 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-utilities\") pod \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") "
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.683908 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clwm5\" (UniqueName: \"kubernetes.io/projected/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-kube-api-access-clwm5\") pod \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") "
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.683939 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-catalog-content\") pod \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\" (UID: \"6fd7e6cd-5936-4e4b-a11e-42b171c5b367\") "
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.685030 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-utilities" (OuterVolumeSpecName: "utilities") pod "6fd7e6cd-5936-4e4b-a11e-42b171c5b367" (UID: "6fd7e6cd-5936-4e4b-a11e-42b171c5b367"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.688225 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-kube-api-access-clwm5" (OuterVolumeSpecName: "kube-api-access-clwm5") pod "6fd7e6cd-5936-4e4b-a11e-42b171c5b367" (UID: "6fd7e6cd-5936-4e4b-a11e-42b171c5b367"). InnerVolumeSpecName "kube-api-access-clwm5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.766718 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6fd7e6cd-5936-4e4b-a11e-42b171c5b367" (UID: "6fd7e6cd-5936-4e4b-a11e-42b171c5b367"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.786202 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.786251 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clwm5\" (UniqueName: \"kubernetes.io/projected/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-kube-api-access-clwm5\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:20 crc kubenswrapper[4816]: I0216 14:44:20.786268 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fd7e6cd-5936-4e4b-a11e-42b171c5b367-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:21 crc kubenswrapper[4816]: I0216 14:44:21.360876 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nb9tc" event={"ID":"6fd7e6cd-5936-4e4b-a11e-42b171c5b367","Type":"ContainerDied","Data":"299eb09c7c2e0486e9c2c13c5793000ba56eeeb002966e6c531564643e0c1145"}
Feb 16 14:44:21 crc kubenswrapper[4816]: I0216 14:44:21.360944 4816 scope.go:117] "RemoveContainer" containerID="b45fd7aaa556bb9ff1015ec059f64c4469d640f67063c66fc0e2d2907305efea"
Feb 16 14:44:21 crc kubenswrapper[4816]: I0216 14:44:21.361510 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nb9tc"
Feb 16 14:44:21 crc kubenswrapper[4816]: I0216 14:44:21.650930 4816 scope.go:117] "RemoveContainer" containerID="6ef11496c831ea23889f3b717f71459b0681a4b447772c03eec5e0df98ba7569"
Feb 16 14:44:21 crc kubenswrapper[4816]: I0216 14:44:21.664433 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nb9tc"]
Feb 16 14:44:21 crc kubenswrapper[4816]: I0216 14:44:21.664472 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nb9tc"]
Feb 16 14:44:21 crc kubenswrapper[4816]: I0216 14:44:21.678036 4816 scope.go:117] "RemoveContainer" containerID="db9b0e43ab9a32dbb14f53347ceaa7c3ad6b14a41d2e9b1fe3dc5d63d6d3a40c"
Feb 16 14:44:23 crc kubenswrapper[4816]: I0216 14:44:23.423805 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" path="/var/lib/kubelet/pods/6fd7e6cd-5936-4e4b-a11e-42b171c5b367/volumes"
Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.157489 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"]
Feb 16 14:44:24 crc kubenswrapper[4816]: E0216 14:44:24.158054 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="registry-server"
Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158076 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="registry-server"
Feb 16 14:44:24 crc kubenswrapper[4816]: E0216 14:44:24.158093 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="extract-utilities"
Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158104 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="extract-utilities"
Feb 16 14:44:24 crc kubenswrapper[4816]: E0216 14:44:24.158129 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="extract-content"
"RemoveStaleState: removing container" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="extract-content" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158138 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="extract-content" Feb 16 14:44:24 crc kubenswrapper[4816]: E0216 14:44:24.158151 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="registry-server" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158159 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="registry-server" Feb 16 14:44:24 crc kubenswrapper[4816]: E0216 14:44:24.158176 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="extract-utilities" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158184 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="extract-utilities" Feb 16 14:44:24 crc kubenswrapper[4816]: E0216 14:44:24.158213 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c58e5974-3461-4e24-9576-60f97be5fcb3" containerName="aodh-db-sync" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158221 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="c58e5974-3461-4e24-9576-60f97be5fcb3" containerName="aodh-db-sync" Feb 16 14:44:24 crc kubenswrapper[4816]: E0216 14:44:24.158234 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="extract-content" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158243 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="extract-content" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158504 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="52afb6b3-043c-409d-a12b-30cbed642e07" containerName="registry-server" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158519 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="c58e5974-3461-4e24-9576-60f97be5fcb3" containerName="aodh-db-sync" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.158534 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fd7e6cd-5936-4e4b-a11e-42b171c5b367" containerName="registry-server" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.165615 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.168942 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.169122 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.169352 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-x2wt4" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.172076 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.252189 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-config-data\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.252573 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzgw5\" (UniqueName: \"kubernetes.io/projected/7f425268-34f3-4db6-a4e7-b806242b8264-kube-api-access-vzgw5\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.252603 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-scripts\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.252860 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-combined-ca-bundle\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.355484 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-combined-ca-bundle\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.355642 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-config-data\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.355806 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzgw5\" (UniqueName: \"kubernetes.io/projected/7f425268-34f3-4db6-a4e7-b806242b8264-kube-api-access-vzgw5\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.355853 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-scripts\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0" Feb 16 14:44:24 crc kubenswrapper[4816]: 
Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.365322 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-config-data\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0"
Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.368149 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f425268-34f3-4db6-a4e7-b806242b8264-combined-ca-bundle\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0"
Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.528395 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzgw5\" (UniqueName: \"kubernetes.io/projected/7f425268-34f3-4db6-a4e7-b806242b8264-kube-api-access-vzgw5\") pod \"aodh-0\" (UID: \"7f425268-34f3-4db6-a4e7-b806242b8264\") " pod="openstack/aodh-0"
Feb 16 14:44:24 crc kubenswrapper[4816]: I0216 14:44:24.790805 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Feb 16 14:44:25 crc kubenswrapper[4816]: I0216 14:44:25.632090 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Feb 16 14:44:26 crc kubenswrapper[4816]: I0216 14:44:26.579814 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"7f425268-34f3-4db6-a4e7-b806242b8264","Type":"ContainerStarted","Data":"c9bb53ad7501eeee7790ef459ec795757cecf15efb6cf402f0c6c11fadcd86df"}
Feb 16 14:44:26 crc kubenswrapper[4816]: I0216 14:44:26.580554 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"7f425268-34f3-4db6-a4e7-b806242b8264","Type":"ContainerStarted","Data":"46fbdc9870ab09510a7a7a7c6fe06ec5da0264623dc297be6def038143ead561"}
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.053390 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkv5w"]
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.068336 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkv5w"]
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.092324 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.093015 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-central-agent" containerID="cri-o://1386843639fd2b9e8402e9702323d6d683191602f988941e531ee2cf29fb55d6" gracePeriod=30
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.093147 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="sg-core" containerID="cri-o://2227a8de0db522f0a4da1180476f5f06a8f18153a3549ada2683ec9e13331abe" gracePeriod=30
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.093033 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="proxy-httpd" containerID="cri-o://db30d225aa918e044d0c722cd502ed3b2d477460d1191aa714090ff8478ee46c" gracePeriod=30
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.093105 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-notification-agent" containerID="cri-o://c100893e3aae34edc214ccf8a9793fd5bd2c803ee0e457296b9f1a4032af0539" gracePeriod=30
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.613864 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d02a9f59-6c3c-471b-bee9-49b243451335" path="/var/lib/kubelet/pods/d02a9f59-6c3c-471b-bee9-49b243451335/volumes"
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.629022 4816 generic.go:334] "Generic (PLEG): container finished" podID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerID="db30d225aa918e044d0c722cd502ed3b2d477460d1191aa714090ff8478ee46c" exitCode=0
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.629062 4816 generic.go:334] "Generic (PLEG): container finished" podID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerID="2227a8de0db522f0a4da1180476f5f06a8f18153a3549ada2683ec9e13331abe" exitCode=2
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.629086 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerDied","Data":"db30d225aa918e044d0c722cd502ed3b2d477460d1191aa714090ff8478ee46c"}
Feb 16 14:44:27 crc kubenswrapper[4816]: I0216 14:44:27.629116 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerDied","Data":"2227a8de0db522f0a4da1180476f5f06a8f18153a3549ada2683ec9e13331abe"}
Feb 16 14:44:28 crc kubenswrapper[4816]: I0216 14:44:28.708382 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"7f425268-34f3-4db6-a4e7-b806242b8264","Type":"ContainerStarted","Data":"a3543a6a0484b83fea3c92d5ca91354a519eb46606c08a7edc4cf332c3565d3f"}
Feb 16 14:44:28 crc kubenswrapper[4816]: I0216 14:44:28.718966 4816 generic.go:334] "Generic (PLEG): container finished" podID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerID="1386843639fd2b9e8402e9702323d6d683191602f988941e531ee2cf29fb55d6" exitCode=0
Feb 16 14:44:28 crc kubenswrapper[4816]: I0216 14:44:28.719356 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerDied","Data":"1386843639fd2b9e8402e9702323d6d683191602f988941e531ee2cf29fb55d6"}
Feb 16 14:44:28 crc kubenswrapper[4816]: E0216 14:44:28.733033 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd7e6cd_5936_4e4b_a11e_42b171c5b367.slice\": RecentStats: unable to find data in memory cache]"
Feb 16 14:44:29 crc kubenswrapper[4816]: I0216 14:44:29.403489 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2"
Feb 16 14:44:29 crc kubenswrapper[4816]: E0216 14:44:29.404048 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:44:29 crc kubenswrapper[4816]: I0216 14:44:29.854412 4816 scope.go:117] "RemoveContainer" containerID="056343b4ef6faa891f7fa46300aff17a63ef02d5acd482d4766ac3918daa7b14"
Feb 16 14:44:29 crc kubenswrapper[4816]: I0216 14:44:29.877761 4816 scope.go:117] "RemoveContainer" containerID="fb5b40fc25c161c49d8e03ae846098440af71b01c62f721f9e88f67ea4a423b2"
Feb 16 14:44:29 crc kubenswrapper[4816]: I0216 14:44:29.945518 4816 scope.go:117] "RemoveContainer" containerID="0341e288a53c31a042bfa1475252a62e458bcccf05b4f66e0578c27267c33fa2"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.011921 4816 scope.go:117] "RemoveContainer" containerID="76732a0ced934cf01b3e6505bf23a6da2a741abcc55920621e0db4aed7d34868"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.069705 4816 scope.go:117] "RemoveContainer" containerID="a7120fc8ec9a659822ccd123c0191d630554675320e62da2847dcbcf8b44e822"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.092941 4816 scope.go:117] "RemoveContainer" containerID="c794b0113a836fee1d955faf272d5fdc94601342b3dda31ee2d32b74eef46567"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.111994 4816 scope.go:117] "RemoveContainer" containerID="2025918f770e24de3e670744ef56c08acedc38816c4db660859f1a863bb571c9"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.143602 4816 scope.go:117] "RemoveContainer" containerID="bee255755502550560bd0f35053ddb28b3411e19d961270f163245789114399b"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.165025 4816 scope.go:117] "RemoveContainer" containerID="793795fa46642e25f4a645c053b1ad6d327c2889a57a94cdada80daf5155f676"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.188981 4816 scope.go:117] "RemoveContainer" containerID="edf7fb2c209872600a4b2481a03e7911cd3d8f0f884205f502226613dd89d639"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.259314 4816 scope.go:117] "RemoveContainer" containerID="b58d4c0e79eaf9cf535ba14e2aadc38a7679ed768c9b43c41bef2ae071180560"
Feb 16 14:44:30 crc kubenswrapper[4816]: I0216 14:44:30.754237 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"7f425268-34f3-4db6-a4e7-b806242b8264","Type":"ContainerStarted","Data":"4bc232575cf853923e2ec4e719c829332b4900945d594d23e4a9b8b703a6e7f0"}
Feb 16 14:44:31 crc kubenswrapper[4816]: I0216 14:44:31.768834 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"7f425268-34f3-4db6-a4e7-b806242b8264","Type":"ContainerStarted","Data":"88fff420a7fe65e7584f7738948cc7292fca31f4baddc71f84ce3864513e3f90"}
Feb 16 14:44:31 crc kubenswrapper[4816]: I0216 14:44:31.801753 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.542653489 podStartE2EDuration="7.801712993s" podCreationTimestamp="2026-02-16 14:44:24 +0000 UTC" firstStartedPulling="2026-02-16 14:44:25.653030032 +0000 UTC m=+6064.979743770" lastFinishedPulling="2026-02-16 14:44:30.912089546 +0000 UTC m=+6070.238803274" observedRunningTime="2026-02-16 14:44:31.789006536 +0000 UTC m=+6071.115720264" watchObservedRunningTime="2026-02-16 14:44:31.801712993 +0000 UTC m=+6071.128426721"
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.790520 4816 generic.go:334] "Generic (PLEG): container finished" podID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerID="c100893e3aae34edc214ccf8a9793fd5bd2c803ee0e457296b9f1a4032af0539" exitCode=0
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.791053 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerDied","Data":"c100893e3aae34edc214ccf8a9793fd5bd2c803ee0e457296b9f1a4032af0539"}
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.791086 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"419310d7-65ac-4798-abcc-e7561cbe8ae7","Type":"ContainerDied","Data":"e742c3617d284e0be5c20f29e206cf5aef071b6feaf39667a066fa3436dc58ee"}
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.791101 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e742c3617d284e0be5c20f29e206cf5aef071b6feaf39667a066fa3436dc58ee"
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.829292 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.979682 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mspdp\" (UniqueName: \"kubernetes.io/projected/419310d7-65ac-4798-abcc-e7561cbe8ae7-kube-api-access-mspdp\") pod \"419310d7-65ac-4798-abcc-e7561cbe8ae7\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") "
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.979943 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-combined-ca-bundle\") pod \"419310d7-65ac-4798-abcc-e7561cbe8ae7\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") "
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.980050 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-scripts\") pod \"419310d7-65ac-4798-abcc-e7561cbe8ae7\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") "
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.980093 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-run-httpd\") pod \"419310d7-65ac-4798-abcc-e7561cbe8ae7\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") "
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.980173 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-log-httpd\") pod \"419310d7-65ac-4798-abcc-e7561cbe8ae7\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") "
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.980198 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-config-data\") pod \"419310d7-65ac-4798-abcc-e7561cbe8ae7\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") "
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.980251 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-sg-core-conf-yaml\") pod \"419310d7-65ac-4798-abcc-e7561cbe8ae7\" (UID: \"419310d7-65ac-4798-abcc-e7561cbe8ae7\") "
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.981145 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "419310d7-65ac-4798-abcc-e7561cbe8ae7" (UID: "419310d7-65ac-4798-abcc-e7561cbe8ae7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.981903 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "419310d7-65ac-4798-abcc-e7561cbe8ae7" (UID: "419310d7-65ac-4798-abcc-e7561cbe8ae7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.985146 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/419310d7-65ac-4798-abcc-e7561cbe8ae7-kube-api-access-mspdp" (OuterVolumeSpecName: "kube-api-access-mspdp") pod "419310d7-65ac-4798-abcc-e7561cbe8ae7" (UID: "419310d7-65ac-4798-abcc-e7561cbe8ae7"). InnerVolumeSpecName "kube-api-access-mspdp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:44:33 crc kubenswrapper[4816]: I0216 14:44:33.985449 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-scripts" (OuterVolumeSpecName: "scripts") pod "419310d7-65ac-4798-abcc-e7561cbe8ae7" (UID: "419310d7-65ac-4798-abcc-e7561cbe8ae7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.021343 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "419310d7-65ac-4798-abcc-e7561cbe8ae7" (UID: "419310d7-65ac-4798-abcc-e7561cbe8ae7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.059979 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "419310d7-65ac-4798-abcc-e7561cbe8ae7" (UID: "419310d7-65ac-4798-abcc-e7561cbe8ae7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.083077 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.083108 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.083120 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/419310d7-65ac-4798-abcc-e7561cbe8ae7-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.083128 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.083137 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mspdp\" (UniqueName: \"kubernetes.io/projected/419310d7-65ac-4798-abcc-e7561cbe8ae7-kube-api-access-mspdp\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.083146 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.103112 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-config-data" (OuterVolumeSpecName: "config-data") pod "419310d7-65ac-4798-abcc-e7561cbe8ae7" (UID: "419310d7-65ac-4798-abcc-e7561cbe8ae7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.184717 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419310d7-65ac-4798-abcc-e7561cbe8ae7-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.800981 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.851804 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.863336 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.876117 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:44:34 crc kubenswrapper[4816]: E0216 14:44:34.877040 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-notification-agent"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877103 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-notification-agent"
Feb 16 14:44:34 crc kubenswrapper[4816]: E0216 14:44:34.877124 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="proxy-httpd"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877133 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="proxy-httpd"
Feb 16 14:44:34 crc kubenswrapper[4816]: E0216 14:44:34.877199 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="sg-core"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877211 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="sg-core"
Feb 16 14:44:34 crc kubenswrapper[4816]: E0216 14:44:34.877242 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-central-agent"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877250 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-central-agent"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877582 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="sg-core"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877612 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-notification-agent"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877625 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="ceilometer-central-agent"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.877651 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" containerName="proxy-httpd"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.880739 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.884122 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.884502 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 16 14:44:34 crc kubenswrapper[4816]: I0216 14:44:34.895685 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.000882 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-config-data\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.000964 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-scripts\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.001062 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.001111 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.001149 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-log-httpd\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.001169 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5td2w\" (UniqueName: \"kubernetes.io/projected/db5a28ec-6409-4676-b4d5-2dd89574c2a2-kube-api-access-5td2w\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.001191 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-run-httpd\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.102676 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.102746 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-log-httpd\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.102794 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5td2w\" (UniqueName: \"kubernetes.io/projected/db5a28ec-6409-4676-b4d5-2dd89574c2a2-kube-api-access-5td2w\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.102818 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-run-httpd\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.102946 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-config-data\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.102995 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-scripts\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.103059 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.103493 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-log-httpd\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.103621 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-run-httpd\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.106824 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.107226 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-config-data\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.107545 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.108466 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-scripts\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.124062 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5td2w\" (UniqueName: \"kubernetes.io/projected/db5a28ec-6409-4676-b4d5-2dd89574c2a2-kube-api-access-5td2w\") pod \"ceilometer-0\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") " pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.220482 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.438036 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="419310d7-65ac-4798-abcc-e7561cbe8ae7" path="/var/lib/kubelet/pods/419310d7-65ac-4798-abcc-e7561cbe8ae7/volumes"
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.733309 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:44:35 crc kubenswrapper[4816]: I0216 14:44:35.811272 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerStarted","Data":"67b2e6e2c85dca3d515af7e6dadc23db820e8e4b0ff651f38b261039f3214c90"}
Feb 16 14:44:36 crc kubenswrapper[4816]: I0216 14:44:36.821436 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerStarted","Data":"7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0"}
Feb 16 14:44:37 crc kubenswrapper[4816]: I0216 14:44:37.838949 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerStarted","Data":"f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646"}
Feb 16 14:44:37 crc kubenswrapper[4816]: I0216 14:44:37.882355 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-2xbmj"]
Feb 16 14:44:37 crc kubenswrapper[4816]: I0216 14:44:37.895364 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:37 crc kubenswrapper[4816]: I0216 14:44:37.921765 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-2xbmj"]
Feb 16 14:44:37 crc kubenswrapper[4816]: I0216 14:44:37.962962 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-operator-scripts\") pod \"manila-db-create-2xbmj\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") " pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:37 crc kubenswrapper[4816]: I0216 14:44:37.963308 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mks7p\" (UniqueName: \"kubernetes.io/projected/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-kube-api-access-mks7p\") pod \"manila-db-create-2xbmj\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") " pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.264381 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mks7p\" (UniqueName: \"kubernetes.io/projected/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-kube-api-access-mks7p\") pod \"manila-db-create-2xbmj\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") " pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.264500 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-operator-scripts\") pod \"manila-db-create-2xbmj\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") " pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.265262 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-operator-scripts\") pod \"manila-db-create-2xbmj\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") " pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.294970 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mks7p\" (UniqueName: \"kubernetes.io/projected/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-kube-api-access-mks7p\") pod \"manila-db-create-2xbmj\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") " pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.306880 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-a628-account-create-update-hkxdn"]
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.308451 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.313479 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.315400 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-a628-account-create-update-hkxdn"]
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.477922 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8pl\" (UniqueName: \"kubernetes.io/projected/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-kube-api-access-jc8pl\") pod \"manila-a628-account-create-update-hkxdn\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.478015 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-operator-scripts\") pod \"manila-a628-account-create-update-hkxdn\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.514146 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.581008 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8pl\" (UniqueName: \"kubernetes.io/projected/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-kube-api-access-jc8pl\") pod \"manila-a628-account-create-update-hkxdn\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.581300 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-operator-scripts\") pod \"manila-a628-account-create-update-hkxdn\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.582023 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-operator-scripts\") pod \"manila-a628-account-create-update-hkxdn\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.610543 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc8pl\" (UniqueName: \"kubernetes.io/projected/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-kube-api-access-jc8pl\") pod \"manila-a628-account-create-update-hkxdn\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.662225 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a628-account-create-update-hkxdn"
Feb 16 14:44:38 crc kubenswrapper[4816]: I0216 14:44:38.860043 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerStarted","Data":"e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a"}
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.248410 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-2xbmj"]
Feb 16 14:44:39 crc kubenswrapper[4816]: E0216 14:44:39.285599 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd7e6cd_5936_4e4b_a11e_42b171c5b367.slice\": RecentStats: unable to find data in memory cache]"
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.469415 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-a628-account-create-update-hkxdn"]
Feb 16 14:44:39 crc kubenswrapper[4816]: W0216 14:44:39.477259 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod747ea09f_2f99_4b20_ae42_49a8fbbd1c56.slice/crio-5c80efff0c591d43f4b4ab693dd83f8e45caf2410f7ae4856c6b8592bccba39c WatchSource:0}: Error finding container 5c80efff0c591d43f4b4ab693dd83f8e45caf2410f7ae4856c6b8592bccba39c: Status 404 returned error can't find the container with id 5c80efff0c591d43f4b4ab693dd83f8e45caf2410f7ae4856c6b8592bccba39c
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.870775 4816 generic.go:334] "Generic (PLEG): container finished" podID="4b6b729a-9df6-47f0-847a-1c16cf3a3f25" containerID="31eb500f8c9309455b1f72031c2ce1da19ef6bec4306bacd0a29c504c13c229d" exitCode=0
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.870969 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-2xbmj" event={"ID":"4b6b729a-9df6-47f0-847a-1c16cf3a3f25","Type":"ContainerDied","Data":"31eb500f8c9309455b1f72031c2ce1da19ef6bec4306bacd0a29c504c13c229d"}
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.871108 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-2xbmj" event={"ID":"4b6b729a-9df6-47f0-847a-1c16cf3a3f25","Type":"ContainerStarted","Data":"40e63d028349140e7f7b2f36ed765f6ee94fc6b1382e74d336bbcc0641e8836d"}
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.874050 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerStarted","Data":"70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb"}
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.875098 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.877003 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a628-account-create-update-hkxdn" event={"ID":"747ea09f-2f99-4b20-ae42-49a8fbbd1c56","Type":"ContainerStarted","Data":"8d60c16420dc20619141aca49ac52c4581b2124329b25c3b05466253750af2d6"}
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.877033 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a628-account-create-update-hkxdn" event={"ID":"747ea09f-2f99-4b20-ae42-49a8fbbd1c56","Type":"ContainerStarted","Data":"5c80efff0c591d43f4b4ab693dd83f8e45caf2410f7ae4856c6b8592bccba39c"}
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.919260 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.133520054 podStartE2EDuration="5.919232777s" podCreationTimestamp="2026-02-16 14:44:34 +0000 UTC" firstStartedPulling="2026-02-16 14:44:35.739433049 +0000 UTC m=+6075.066146777" lastFinishedPulling="2026-02-16 14:44:39.525145772 +0000 UTC m=+6078.851859500" observedRunningTime="2026-02-16 14:44:39.909772999 +0000 UTC m=+6079.236486737" watchObservedRunningTime="2026-02-16 14:44:39.919232777 +0000 UTC m=+6079.245946525"
Feb 16 14:44:39 crc kubenswrapper[4816]: I0216 14:44:39.941465 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-a628-account-create-update-hkxdn" podStartSLOduration=1.941443442 podStartE2EDuration="1.941443442s" podCreationTimestamp="2026-02-16 14:44:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:44:39.926577797 +0000 UTC m=+6079.253291525" watchObservedRunningTime="2026-02-16 14:44:39.941443442 +0000 UTC m=+6079.268157170"
Feb 16 14:44:40 crc kubenswrapper[4816]: I0216 14:44:40.888700 4816 generic.go:334] "Generic (PLEG): container finished" podID="747ea09f-2f99-4b20-ae42-49a8fbbd1c56" containerID="8d60c16420dc20619141aca49ac52c4581b2124329b25c3b05466253750af2d6" exitCode=0
Feb 16 14:44:40 crc kubenswrapper[4816]: I0216 14:44:40.888747 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a628-account-create-update-hkxdn" event={"ID":"747ea09f-2f99-4b20-ae42-49a8fbbd1c56","Type":"ContainerDied","Data":"8d60c16420dc20619141aca49ac52c4581b2124329b25c3b05466253750af2d6"}
Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.332104 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-2xbmj"
Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.342412 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-operator-scripts\") pod \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") "
Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.342459 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mks7p\" (UniqueName: \"kubernetes.io/projected/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-kube-api-access-mks7p\") pod \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\" (UID: \"4b6b729a-9df6-47f0-847a-1c16cf3a3f25\") "
Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.343872 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4b6b729a-9df6-47f0-847a-1c16cf3a3f25" (UID: "4b6b729a-9df6-47f0-847a-1c16cf3a3f25"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.347796 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-kube-api-access-mks7p" (OuterVolumeSpecName: "kube-api-access-mks7p") pod "4b6b729a-9df6-47f0-847a-1c16cf3a3f25" (UID: "4b6b729a-9df6-47f0-847a-1c16cf3a3f25"). InnerVolumeSpecName "kube-api-access-mks7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.447461 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mks7p\" (UniqueName: \"kubernetes.io/projected/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-kube-api-access-mks7p\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.447498 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b729a-9df6-47f0-847a-1c16cf3a3f25-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.902543 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-2xbmj" Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.903242 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-2xbmj" event={"ID":"4b6b729a-9df6-47f0-847a-1c16cf3a3f25","Type":"ContainerDied","Data":"40e63d028349140e7f7b2f36ed765f6ee94fc6b1382e74d336bbcc0641e8836d"} Feb 16 14:44:41 crc kubenswrapper[4816]: I0216 14:44:41.903272 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40e63d028349140e7f7b2f36ed765f6ee94fc6b1382e74d336bbcc0641e8836d" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.310434 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-a628-account-create-update-hkxdn" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.406769 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:44:42 crc kubenswrapper[4816]: E0216 14:44:42.407078 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.467679 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc8pl\" (UniqueName: \"kubernetes.io/projected/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-kube-api-access-jc8pl\") pod \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.468277 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-operator-scripts\") pod \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\" (UID: \"747ea09f-2f99-4b20-ae42-49a8fbbd1c56\") " Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.468792 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "747ea09f-2f99-4b20-ae42-49a8fbbd1c56" (UID: "747ea09f-2f99-4b20-ae42-49a8fbbd1c56"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.469783 4816 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.477403 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-kube-api-access-jc8pl" (OuterVolumeSpecName: "kube-api-access-jc8pl") pod "747ea09f-2f99-4b20-ae42-49a8fbbd1c56" (UID: "747ea09f-2f99-4b20-ae42-49a8fbbd1c56"). InnerVolumeSpecName "kube-api-access-jc8pl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.573752 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc8pl\" (UniqueName: \"kubernetes.io/projected/747ea09f-2f99-4b20-ae42-49a8fbbd1c56-kube-api-access-jc8pl\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.910468 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a628-account-create-update-hkxdn" event={"ID":"747ea09f-2f99-4b20-ae42-49a8fbbd1c56","Type":"ContainerDied","Data":"5c80efff0c591d43f4b4ab693dd83f8e45caf2410f7ae4856c6b8592bccba39c"} Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.911597 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c80efff0c591d43f4b4ab693dd83f8e45caf2410f7ae4856c6b8592bccba39c" Feb 16 14:44:42 crc kubenswrapper[4816]: I0216 14:44:42.911571 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a628-account-create-update-hkxdn" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.656747 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-pqn4j"] Feb 16 14:44:43 crc kubenswrapper[4816]: E0216 14:44:43.657528 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b6b729a-9df6-47f0-847a-1c16cf3a3f25" containerName="mariadb-database-create" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.657557 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b6b729a-9df6-47f0-847a-1c16cf3a3f25" containerName="mariadb-database-create" Feb 16 14:44:43 crc kubenswrapper[4816]: E0216 14:44:43.657592 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="747ea09f-2f99-4b20-ae42-49a8fbbd1c56" containerName="mariadb-account-create-update" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.657601 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="747ea09f-2f99-4b20-ae42-49a8fbbd1c56" containerName="mariadb-account-create-update" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.657878 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b6b729a-9df6-47f0-847a-1c16cf3a3f25" containerName="mariadb-database-create" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.657926 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="747ea09f-2f99-4b20-ae42-49a8fbbd1c56" containerName="mariadb-account-create-update" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.658845 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.661315 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-z5hnq" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.666769 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.675927 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-pqn4j"] Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.706992 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fc9b\" (UniqueName: \"kubernetes.io/projected/cd3b7c80-9ea3-48c6-91fc-947d0315b206-kube-api-access-8fc9b\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.707103 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-config-data\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.707162 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-combined-ca-bundle\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.707266 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-job-config-data\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.810854 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-combined-ca-bundle\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.811061 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-job-config-data\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.811490 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fc9b\" (UniqueName: \"kubernetes.io/projected/cd3b7c80-9ea3-48c6-91fc-947d0315b206-kube-api-access-8fc9b\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.811853 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-config-data\") pod \"manila-db-sync-pqn4j\" (UID: 
\"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.815704 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-job-config-data\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.816489 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-combined-ca-bundle\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.821555 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-config-data\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:43 crc kubenswrapper[4816]: I0216 14:44:43.836681 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fc9b\" (UniqueName: \"kubernetes.io/projected/cd3b7c80-9ea3-48c6-91fc-947d0315b206-kube-api-access-8fc9b\") pod \"manila-db-sync-pqn4j\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:44 crc kubenswrapper[4816]: I0216 14:44:44.021546 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:44 crc kubenswrapper[4816]: I0216 14:44:44.978363 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-pqn4j"] Feb 16 14:44:45 crc kubenswrapper[4816]: I0216 14:44:45.938446 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-pqn4j" event={"ID":"cd3b7c80-9ea3-48c6-91fc-947d0315b206","Type":"ContainerStarted","Data":"5531de9f90b5f5ec2bf800f8b4a5891d2cb7a203a6dd93ddb2b67cb78b9f97c7"} Feb 16 14:44:49 crc kubenswrapper[4816]: E0216 14:44:49.605247 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd7e6cd_5936_4e4b_a11e_42b171c5b367.slice\": RecentStats: unable to find data in memory cache]" Feb 16 14:44:50 crc kubenswrapper[4816]: I0216 14:44:50.994401 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-pqn4j" event={"ID":"cd3b7c80-9ea3-48c6-91fc-947d0315b206","Type":"ContainerStarted","Data":"d670ac68c03d876eeb2cc36d9b12eefb455db6144ef754274c63e72bf27d6869"} Feb 16 14:44:51 crc kubenswrapper[4816]: I0216 14:44:51.020854 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-pqn4j" podStartSLOduration=3.452077182 podStartE2EDuration="8.020798224s" podCreationTimestamp="2026-02-16 14:44:43 +0000 UTC" firstStartedPulling="2026-02-16 14:44:44.985995968 +0000 UTC m=+6084.312709696" lastFinishedPulling="2026-02-16 14:44:49.55471697 +0000 UTC m=+6088.881430738" observedRunningTime="2026-02-16 14:44:51.007567874 +0000 UTC m=+6090.334281612" watchObservedRunningTime="2026-02-16 14:44:51.020798224 +0000 UTC m=+6090.347511972" Feb 16 14:44:52 crc kubenswrapper[4816]: I0216 14:44:52.006507 4816 generic.go:334] "Generic (PLEG): 
container finished" podID="cd3b7c80-9ea3-48c6-91fc-947d0315b206" containerID="d670ac68c03d876eeb2cc36d9b12eefb455db6144ef754274c63e72bf27d6869" exitCode=0 Feb 16 14:44:52 crc kubenswrapper[4816]: I0216 14:44:52.006565 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-pqn4j" event={"ID":"cd3b7c80-9ea3-48c6-91fc-947d0315b206","Type":"ContainerDied","Data":"d670ac68c03d876eeb2cc36d9b12eefb455db6144ef754274c63e72bf27d6869"} Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.399444 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:44:53 crc kubenswrapper[4816]: E0216 14:44:53.400278 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.500388 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.554377 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-combined-ca-bundle\") pod \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.554516 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-job-config-data\") pod \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.554546 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fc9b\" (UniqueName: \"kubernetes.io/projected/cd3b7c80-9ea3-48c6-91fc-947d0315b206-kube-api-access-8fc9b\") pod \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.554630 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-config-data\") pod \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\" (UID: \"cd3b7c80-9ea3-48c6-91fc-947d0315b206\") " Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.571901 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "cd3b7c80-9ea3-48c6-91fc-947d0315b206" (UID: "cd3b7c80-9ea3-48c6-91fc-947d0315b206"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.572037 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd3b7c80-9ea3-48c6-91fc-947d0315b206-kube-api-access-8fc9b" (OuterVolumeSpecName: "kube-api-access-8fc9b") pod "cd3b7c80-9ea3-48c6-91fc-947d0315b206" (UID: "cd3b7c80-9ea3-48c6-91fc-947d0315b206"). 
InnerVolumeSpecName "kube-api-access-8fc9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.578098 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-config-data" (OuterVolumeSpecName: "config-data") pod "cd3b7c80-9ea3-48c6-91fc-947d0315b206" (UID: "cd3b7c80-9ea3-48c6-91fc-947d0315b206"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.586850 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd3b7c80-9ea3-48c6-91fc-947d0315b206" (UID: "cd3b7c80-9ea3-48c6-91fc-947d0315b206"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.657807 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.658104 4816 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-job-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.658184 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fc9b\" (UniqueName: \"kubernetes.io/projected/cd3b7c80-9ea3-48c6-91fc-947d0315b206-kube-api-access-8fc9b\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:53 crc kubenswrapper[4816]: I0216 14:44:53.658263 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd3b7c80-9ea3-48c6-91fc-947d0315b206-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.026088 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-pqn4j" event={"ID":"cd3b7c80-9ea3-48c6-91fc-947d0315b206","Type":"ContainerDied","Data":"5531de9f90b5f5ec2bf800f8b4a5891d2cb7a203a6dd93ddb2b67cb78b9f97c7"} Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.026737 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5531de9f90b5f5ec2bf800f8b4a5891d2cb7a203a6dd93ddb2b67cb78b9f97c7" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.026385 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-pqn4j" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.405389 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Feb 16 14:44:54 crc kubenswrapper[4816]: E0216 14:44:54.405959 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd3b7c80-9ea3-48c6-91fc-947d0315b206" containerName="manila-db-sync" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.405976 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd3b7c80-9ea3-48c6-91fc-947d0315b206" containerName="manila-db-sync" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.406244 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd3b7c80-9ea3-48c6-91fc-947d0315b206" containerName="manila-db-sync" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.426109 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.426199 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.456531 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.458102 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.458363 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.458476 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-z5hnq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.473102 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-866d8d774c-svqvq"] Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.475371 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.475458 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-config-data\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.475517 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crr8d\" (UniqueName: \"kubernetes.io/projected/9f46265c-6ed1-44e2-b935-ed26dda53d9d-kube-api-access-crr8d\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.475585 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.475703 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-scripts\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.475887 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f46265c-6ed1-44e2-b935-ed26dda53d9d-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.475996 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.509774 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-866d8d774c-svqvq"] Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.534570 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.536427 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.539038 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.553398 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578281 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-scripts\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578348 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c993b595-71ab-4e33-83ea-3c1954491d41-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578398 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-dns-svc\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578418 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/c993b595-71ab-4e33-83ea-3c1954491d41-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578452 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9882h\" (UniqueName: \"kubernetes.io/projected/c993b595-71ab-4e33-83ea-3c1954491d41-kube-api-access-9882h\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578468 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578493 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nr97\" (UniqueName: \"kubernetes.io/projected/de61dafb-9371-4968-8c26-702d7b9e6be6-kube-api-access-8nr97\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578511 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-config-data\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 
14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578549 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f46265c-6ed1-44e2-b935-ed26dda53d9d-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578586 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578627 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578724 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-nb\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578746 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-scripts\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578776 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-config-data\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578796 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-config\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578818 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crr8d\" (UniqueName: \"kubernetes.io/projected/9f46265c-6ed1-44e2-b935-ed26dda53d9d-kube-api-access-crr8d\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578836 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c993b595-71ab-4e33-83ea-3c1954491d41-ceph\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578861 4816 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.578893 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-sb\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.580293 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f46265c-6ed1-44e2-b935-ed26dda53d9d-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.584188 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-scripts\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.584347 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-config-data\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.584718 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.586429 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f46265c-6ed1-44e2-b935-ed26dda53d9d-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.625924 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crr8d\" (UniqueName: \"kubernetes.io/projected/9f46265c-6ed1-44e2-b935-ed26dda53d9d-kube-api-access-crr8d\") pod \"manila-scheduler-0\" (UID: \"9f46265c-6ed1-44e2-b935-ed26dda53d9d\") " pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680361 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680443 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-nb\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " 
pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680464 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-scripts\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680504 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-config\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680530 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c993b595-71ab-4e33-83ea-3c1954491d41-ceph\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680566 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-sb\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680600 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c993b595-71ab-4e33-83ea-3c1954491d41-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680639 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-dns-svc\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680672 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/c993b595-71ab-4e33-83ea-3c1954491d41-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680705 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9882h\" (UniqueName: \"kubernetes.io/projected/c993b595-71ab-4e33-83ea-3c1954491d41-kube-api-access-9882h\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680728 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680750 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-8nr97\" (UniqueName: \"kubernetes.io/projected/de61dafb-9371-4968-8c26-702d7b9e6be6-kube-api-access-8nr97\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.680771 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-config-data\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.681034 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c993b595-71ab-4e33-83ea-3c1954491d41-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.683990 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-config-data\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.684026 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-nb\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.684540 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-scripts\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.684583 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-dns-svc\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.684816 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/c993b595-71ab-4e33-83ea-3c1954491d41-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.685324 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-sb\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.686146 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-config\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " 
pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.687955 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.688261 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c993b595-71ab-4e33-83ea-3c1954491d41-ceph\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.690076 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c993b595-71ab-4e33-83ea-3c1954491d41-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.708739 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9882h\" (UniqueName: \"kubernetes.io/projected/c993b595-71ab-4e33-83ea-3c1954491d41-kube-api-access-9882h\") pod \"manila-share-share1-0\" (UID: \"c993b595-71ab-4e33-83ea-3c1954491d41\") " pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.714836 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nr97\" (UniqueName: \"kubernetes.io/projected/de61dafb-9371-4968-8c26-702d7b9e6be6-kube-api-access-8nr97\") pod \"dnsmasq-dns-866d8d774c-svqvq\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.724580 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.726915 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.729775 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.742311 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.787452 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.787550 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-etc-machine-id\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.787580 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-logs\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.787685 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ng9h9\" (UniqueName: \"kubernetes.io/projected/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-kube-api-access-ng9h9\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.787706 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-scripts\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.787762 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-config-data-custom\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.787791 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-config-data\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.788007 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.806457 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.889591 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ng9h9\" (UniqueName: \"kubernetes.io/projected/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-kube-api-access-ng9h9\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.889644 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-scripts\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.889761 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-config-data-custom\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.889833 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-config-data\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.889877 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.889938 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-etc-machine-id\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.889964 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-logs\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.890648 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-logs\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.892455 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-etc-machine-id\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.895220 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-scripts\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " 
pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.895821 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-config-data\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.896622 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.903426 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.905642 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-config-data-custom\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:54 crc kubenswrapper[4816]: I0216 14:44:54.908582 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ng9h9\" (UniqueName: \"kubernetes.io/projected/9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff-kube-api-access-ng9h9\") pod \"manila-api-0\" (UID: \"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff\") " pod="openstack/manila-api-0" Feb 16 14:44:55 crc kubenswrapper[4816]: I0216 14:44:55.096012 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Feb 16 14:44:55 crc kubenswrapper[4816]: I0216 14:44:55.394309 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Feb 16 14:44:55 crc kubenswrapper[4816]: I0216 14:44:55.705554 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-866d8d774c-svqvq"] Feb 16 14:44:55 crc kubenswrapper[4816]: I0216 14:44:55.762492 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Feb 16 14:44:55 crc kubenswrapper[4816]: I0216 14:44:55.994850 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Feb 16 14:44:56 crc kubenswrapper[4816]: I0216 14:44:56.164252 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff","Type":"ContainerStarted","Data":"e658d56c8ac70a23786df571ef95bfcc09ac0bca53c91f3a2811ef3b5f682461"} Feb 16 14:44:56 crc kubenswrapper[4816]: I0216 14:44:56.169076 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" event={"ID":"de61dafb-9371-4968-8c26-702d7b9e6be6","Type":"ContainerStarted","Data":"e2940aac2a1b589b6f7814fd2e7d147f96234c503be1bbd255040f4ad4983cdb"} Feb 16 14:44:56 crc kubenswrapper[4816]: I0216 14:44:56.169944 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9f46265c-6ed1-44e2-b935-ed26dda53d9d","Type":"ContainerStarted","Data":"9087b3254009ef9e392d88733757017f8d7142d0082a77c5a1e278bfd32b551f"} Feb 16 14:44:56 crc kubenswrapper[4816]: I0216 14:44:56.171780 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" 
event={"ID":"c993b595-71ab-4e33-83ea-3c1954491d41","Type":"ContainerStarted","Data":"ec75a27f15c0b24776faca75d534368b3fe94ee5d0b4f7973a587723485bbf70"} Feb 16 14:44:57 crc kubenswrapper[4816]: I0216 14:44:57.199717 4816 generic.go:334] "Generic (PLEG): container finished" podID="de61dafb-9371-4968-8c26-702d7b9e6be6" containerID="87ea851028b71de5b61655b0b79c23d02fc959143114ca9e62f9ab35b8cdec87" exitCode=0 Feb 16 14:44:57 crc kubenswrapper[4816]: I0216 14:44:57.200238 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" event={"ID":"de61dafb-9371-4968-8c26-702d7b9e6be6","Type":"ContainerDied","Data":"87ea851028b71de5b61655b0b79c23d02fc959143114ca9e62f9ab35b8cdec87"} Feb 16 14:44:57 crc kubenswrapper[4816]: I0216 14:44:57.212125 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9f46265c-6ed1-44e2-b935-ed26dda53d9d","Type":"ContainerStarted","Data":"4b488265a8fcd26cf91f78ef4fc62293973fe35c845af205a578bf2e6d742c16"} Feb 16 14:44:57 crc kubenswrapper[4816]: I0216 14:44:57.231757 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff","Type":"ContainerStarted","Data":"d351468da6817bc3618a3e62cf93bbfca654a9a81c165294a37e7c32afc5b4ba"} Feb 16 14:44:57 crc kubenswrapper[4816]: I0216 14:44:57.233336 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Feb 16 14:44:57 crc kubenswrapper[4816]: I0216 14:44:57.267738 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.2677185939999998 podStartE2EDuration="3.267718594s" podCreationTimestamp="2026-02-16 14:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:44:57.258122493 +0000 UTC m=+6096.584836221" watchObservedRunningTime="2026-02-16 14:44:57.267718594 +0000 UTC m=+6096.594432322" Feb 16 14:44:58 crc kubenswrapper[4816]: I0216 14:44:58.243904 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff","Type":"ContainerStarted","Data":"a8dbdcb6767f0681323bb0cc1fa1fe13ffe437006aab5760e24f7c9ebc41e155"} Feb 16 14:44:58 crc kubenswrapper[4816]: I0216 14:44:58.246180 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" event={"ID":"de61dafb-9371-4968-8c26-702d7b9e6be6","Type":"ContainerStarted","Data":"c899cca45673f1401834bf58e18a209390d0f44369be07397582122dd3ed894c"} Feb 16 14:44:58 crc kubenswrapper[4816]: I0216 14:44:58.246263 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:44:58 crc kubenswrapper[4816]: I0216 14:44:58.252865 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9f46265c-6ed1-44e2-b935-ed26dda53d9d","Type":"ContainerStarted","Data":"77e8f288f000b589b01dcf5e07b2337c9561a2615f97efcc8ed041a55bd37e7d"} Feb 16 14:44:58 crc kubenswrapper[4816]: I0216 14:44:58.263782 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" podStartSLOduration=4.263760283 podStartE2EDuration="4.263760283s" podCreationTimestamp="2026-02-16 14:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
Feb 16 14:44:58 crc kubenswrapper[4816]: I0216 14:44:58.263782 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" podStartSLOduration=4.263760283 podStartE2EDuration="4.263760283s" podCreationTimestamp="2026-02-16 14:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:44:58.262005035 +0000 UTC m=+6097.588718763" watchObservedRunningTime="2026-02-16 14:44:58.263760283 +0000 UTC m=+6097.590474021"
Feb 16 14:44:58 crc kubenswrapper[4816]: I0216 14:44:58.294549 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.529835202 podStartE2EDuration="4.294530432s" podCreationTimestamp="2026-02-16 14:44:54 +0000 UTC" firstStartedPulling="2026-02-16 14:44:55.394393027 +0000 UTC m=+6094.721106755" lastFinishedPulling="2026-02-16 14:44:56.159088257 +0000 UTC m=+6095.485801985" observedRunningTime="2026-02-16 14:44:58.285373573 +0000 UTC m=+6097.612087311" watchObservedRunningTime="2026-02-16 14:44:58.294530432 +0000 UTC m=+6097.621244160"
Feb 16 14:44:59 crc kubenswrapper[4816]: E0216 14:44:59.909731 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd7e6cd_5936_4e4b_a11e_42b171c5b367.slice\": RecentStats: unable to find data in memory cache]"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.193940 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"]
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.195636 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.204344 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.204493 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.222149 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"]
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.347319 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4805de8d-a273-4606-9758-3640f20946d5-secret-volume\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.347395 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fhfd\" (UniqueName: \"kubernetes.io/projected/4805de8d-a273-4606-9758-3640f20946d5-kube-api-access-8fhfd\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.347430 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4805de8d-a273-4606-9758-3640f20946d5-config-volume\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.449822 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4805de8d-a273-4606-9758-3640f20946d5-secret-volume\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.449937 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fhfd\" (UniqueName: \"kubernetes.io/projected/4805de8d-a273-4606-9758-3640f20946d5-kube-api-access-8fhfd\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.449975 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4805de8d-a273-4606-9758-3640f20946d5-config-volume\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.452840 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4805de8d-a273-4606-9758-3640f20946d5-config-volume\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.470780 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4805de8d-a273-4606-9758-3640f20946d5-secret-volume\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Feb 16 14:45:00 crc kubenswrapper[4816]: I0216 14:45:00.473455 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fhfd\" (UniqueName: \"kubernetes.io/projected/4805de8d-a273-4606-9758-3640f20946d5-kube-api-access-8fhfd\") pod \"collect-profiles-29520885-d6ksr\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr" Feb 16 14:45:03 crc kubenswrapper[4816]: I0216 14:45:03.491795 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"] Feb 16 14:45:03 crc kubenswrapper[4816]: W0216 14:45:03.498154 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4805de8d_a273_4606_9758_3640f20946d5.slice/crio-5756c0c210d2f79df285618666f1ae6f926c61f3e7f96008e69dd77594ba6203 WatchSource:0}: Error finding container 5756c0c210d2f79df285618666f1ae6f926c61f3e7f96008e69dd77594ba6203: Status 404 returned error can't find the container with id 5756c0c210d2f79df285618666f1ae6f926c61f3e7f96008e69dd77594ba6203 Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.324512 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"c993b595-71ab-4e33-83ea-3c1954491d41","Type":"ContainerStarted","Data":"77ad69736a7a0852aaec929c240c7eb5d3e4cda22b7bd4dbb6c7b3df3a24aa6c"} Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.324844 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"c993b595-71ab-4e33-83ea-3c1954491d41","Type":"ContainerStarted","Data":"adfb1d44c93e3c70348ee3e2db2dd5a41b61c3fd1054326d6d45be83f2975012"} Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.327840 4816 generic.go:334] "Generic (PLEG): container finished" podID="4805de8d-a273-4606-9758-3640f20946d5" containerID="7891f3163869fccfa63dab749bed4b2d62bcb2f32ad4a8e49123e0d3dceb3bcc" exitCode=0 Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.327885 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr" event={"ID":"4805de8d-a273-4606-9758-3640f20946d5","Type":"ContainerDied","Data":"7891f3163869fccfa63dab749bed4b2d62bcb2f32ad4a8e49123e0d3dceb3bcc"} Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.327911 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr" event={"ID":"4805de8d-a273-4606-9758-3640f20946d5","Type":"ContainerStarted","Data":"5756c0c210d2f79df285618666f1ae6f926c61f3e7f96008e69dd77594ba6203"} Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.355739 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.044745704 podStartE2EDuration="10.355716267s" podCreationTimestamp="2026-02-16 14:44:54 +0000 UTC" firstStartedPulling="2026-02-16 14:44:55.785763697 +0000 UTC m=+6095.112477425" lastFinishedPulling="2026-02-16 14:45:03.09673426 +0000 UTC m=+6102.423447988" observedRunningTime="2026-02-16 14:45:04.345385196 +0000 UTC m=+6103.672098924" watchObservedRunningTime="2026-02-16 14:45:04.355716267 +0000 UTC m=+6103.682429995" Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.789181 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.807860 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.896412 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6999d8bd47-28jrp"] Feb 16 14:45:04 crc 
kubenswrapper[4816]: I0216 14:45:04.896681 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" podUID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerName="dnsmasq-dns" containerID="cri-o://6e52092c597c2064da5d45b464268075c8a0d67e2367adec76a505262da0471b" gracePeriod=10 Feb 16 14:45:04 crc kubenswrapper[4816]: I0216 14:45:04.898369 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.248228 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.352145 4816 generic.go:334] "Generic (PLEG): container finished" podID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerID="6e52092c597c2064da5d45b464268075c8a0d67e2367adec76a505262da0471b" exitCode=0 Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.352350 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" event={"ID":"2d9ee62f-4dcf-447b-a68b-aed832204fc8","Type":"ContainerDied","Data":"6e52092c597c2064da5d45b464268075c8a0d67e2367adec76a505262da0471b"} Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.791928 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.959206 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr" Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.980977 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z659\" (UniqueName: \"kubernetes.io/projected/2d9ee62f-4dcf-447b-a68b-aed832204fc8-kube-api-access-5z659\") pod \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.981894 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-nb\") pod \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.982038 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-dns-svc\") pod \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.982094 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-config\") pod \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " Feb 16 14:45:05 crc kubenswrapper[4816]: I0216 14:45:05.982214 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-sb\") pod \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\" (UID: \"2d9ee62f-4dcf-447b-a68b-aed832204fc8\") " Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.002973 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
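
The "Killing container with a grace period" entries above (gracePeriod=10 for dnsmasq-dns, gracePeriod=30 for the ceilometer containers later on) describe the usual two-stage stop: a polite signal, a bounded wait, then a hard kill. An illustrative Go sketch of that sequence, not CRI-O's or kubelet's code:

// grace_kill.go - deliver SIGTERM, wait up to the grace period for the
// process to exit on its own, then escalate to SIGKILL. The exitCode values
// in the "container finished" entries are what the runtime reports after
// this dance (0 for a clean SIGTERM handler, non-zero otherwise).
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	cmd.Process.Signal(syscall.SIGTERM) // polite request
	select {
	case err := <-done:
		fmt.Println("exited within grace period:", err)
	case <-time.After(grace):
		cmd.Process.Kill() // SIGKILL once the grace period lapses
		fmt.Println("escalated to SIGKILL:", <-done)
	}
}

func main() {
	cmd := exec.Command("sleep", "60") // stand-in for a container process
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second)
}
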
"kubernetes.io/projected/2d9ee62f-4dcf-447b-a68b-aed832204fc8-kube-api-access-5z659" (OuterVolumeSpecName: "kube-api-access-5z659") pod "2d9ee62f-4dcf-447b-a68b-aed832204fc8" (UID: "2d9ee62f-4dcf-447b-a68b-aed832204fc8"). InnerVolumeSpecName "kube-api-access-5z659". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.045477 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2d9ee62f-4dcf-447b-a68b-aed832204fc8" (UID: "2d9ee62f-4dcf-447b-a68b-aed832204fc8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.045925 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2d9ee62f-4dcf-447b-a68b-aed832204fc8" (UID: "2d9ee62f-4dcf-447b-a68b-aed832204fc8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.047894 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2d9ee62f-4dcf-447b-a68b-aed832204fc8" (UID: "2d9ee62f-4dcf-447b-a68b-aed832204fc8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.054984 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-config" (OuterVolumeSpecName: "config") pod "2d9ee62f-4dcf-447b-a68b-aed832204fc8" (UID: "2d9ee62f-4dcf-447b-a68b-aed832204fc8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.084407 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4805de8d-a273-4606-9758-3640f20946d5-config-volume\") pod \"4805de8d-a273-4606-9758-3640f20946d5\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.084491 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4805de8d-a273-4606-9758-3640f20946d5-secret-volume\") pod \"4805de8d-a273-4606-9758-3640f20946d5\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.084530 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fhfd\" (UniqueName: \"kubernetes.io/projected/4805de8d-a273-4606-9758-3640f20946d5-kube-api-access-8fhfd\") pod \"4805de8d-a273-4606-9758-3640f20946d5\" (UID: \"4805de8d-a273-4606-9758-3640f20946d5\") " Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.085005 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.085021 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.085030 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.085039 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z659\" (UniqueName: \"kubernetes.io/projected/2d9ee62f-4dcf-447b-a68b-aed832204fc8-kube-api-access-5z659\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.085048 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d9ee62f-4dcf-447b-a68b-aed832204fc8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.085306 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4805de8d-a273-4606-9758-3640f20946d5-config-volume" (OuterVolumeSpecName: "config-volume") pod "4805de8d-a273-4606-9758-3640f20946d5" (UID: "4805de8d-a273-4606-9758-3640f20946d5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.089975 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4805de8d-a273-4606-9758-3640f20946d5-kube-api-access-8fhfd" (OuterVolumeSpecName: "kube-api-access-8fhfd") pod "4805de8d-a273-4606-9758-3640f20946d5" (UID: "4805de8d-a273-4606-9758-3640f20946d5"). InnerVolumeSpecName "kube-api-access-8fhfd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.090068 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4805de8d-a273-4606-9758-3640f20946d5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4805de8d-a273-4606-9758-3640f20946d5" (UID: "4805de8d-a273-4606-9758-3640f20946d5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.187304 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4805de8d-a273-4606-9758-3640f20946d5-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.187714 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4805de8d-a273-4606-9758-3640f20946d5-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.187862 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fhfd\" (UniqueName: \"kubernetes.io/projected/4805de8d-a273-4606-9758-3640f20946d5-kube-api-access-8fhfd\") on node \"crc\" DevicePath \"\"" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.363893 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" event={"ID":"2d9ee62f-4dcf-447b-a68b-aed832204fc8","Type":"ContainerDied","Data":"c6f93af4dcb299d9f8c383799c51baf26f6885ff41ec451f941fbc9290d330e1"} Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.364267 4816 scope.go:117] "RemoveContainer" containerID="6e52092c597c2064da5d45b464268075c8a0d67e2367adec76a505262da0471b" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.364190 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6999d8bd47-28jrp" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.374763 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.374863 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr" event={"ID":"4805de8d-a273-4606-9758-3640f20946d5","Type":"ContainerDied","Data":"5756c0c210d2f79df285618666f1ae6f926c61f3e7f96008e69dd77594ba6203"} Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.374916 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5756c0c210d2f79df285618666f1ae6f926c61f3e7f96008e69dd77594ba6203" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.399560 4816 scope.go:117] "RemoveContainer" containerID="d5dda9dea26d670e878147505fe29f79d11ce51b98e636e7a59cdf39e47a38fe" Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.409472 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6999d8bd47-28jrp"] Feb 16 14:45:06 crc kubenswrapper[4816]: I0216 14:45:06.420893 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6999d8bd47-28jrp"] Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.064841 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"] Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.078452 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520840-mqfvp"] Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.405474 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:45:07 crc kubenswrapper[4816]: E0216 14:45:07.405732 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.413469 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" path="/var/lib/kubelet/pods/2d9ee62f-4dcf-447b-a68b-aed832204fc8/volumes" Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.414218 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15" path="/var/lib/kubelet/pods/84dd290c-7c09-4c8b-b9fa-1ed4eb01fb15/volumes" Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.745035 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.745334 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-central-agent" containerID="cri-o://7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0" gracePeriod=30 Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.745489 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="sg-core" containerID="cri-o://e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a" 
Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.745489 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="sg-core" containerID="cri-o://e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a" gracePeriod=30
Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.745524 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="proxy-httpd" containerID="cri-o://70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb" gracePeriod=30
Feb 16 14:45:07 crc kubenswrapper[4816]: I0216 14:45:07.745507 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-notification-agent" containerID="cri-o://f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646" gracePeriod=30
Feb 16 14:45:08 crc kubenswrapper[4816]: I0216 14:45:08.399430 4816 generic.go:334] "Generic (PLEG): container finished" podID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerID="70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb" exitCode=0
Feb 16 14:45:08 crc kubenswrapper[4816]: I0216 14:45:08.399793 4816 generic.go:334] "Generic (PLEG): container finished" podID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerID="e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a" exitCode=2
Feb 16 14:45:08 crc kubenswrapper[4816]: I0216 14:45:08.399803 4816 generic.go:334] "Generic (PLEG): container finished" podID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerID="7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0" exitCode=0
Feb 16 14:45:08 crc kubenswrapper[4816]: I0216 14:45:08.399822 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerDied","Data":"70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb"}
Feb 16 14:45:08 crc kubenswrapper[4816]: I0216 14:45:08.399844 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerDied","Data":"e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a"}
Feb 16 14:45:08 crc kubenswrapper[4816]: I0216 14:45:08.399856 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerDied","Data":"7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0"}
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.058252 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-29hsq"]
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.073454 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-d401-account-create-update-wfx9n"]
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.081771 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-29hsq"]
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.091642 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-d401-account-create-update-wfx9n"]
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.125743 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.239732 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd7e6cd_5936_4e4b_a11e_42b171c5b367.slice\": RecentStats: unable to find data in memory cache]"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.278450 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-combined-ca-bundle\") pod \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") "
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.278526 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-scripts\") pod \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") "
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.278562 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5td2w\" (UniqueName: \"kubernetes.io/projected/db5a28ec-6409-4676-b4d5-2dd89574c2a2-kube-api-access-5td2w\") pod \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") "
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.278624 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-run-httpd\") pod \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") "
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.278733 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-log-httpd\") pod \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") "
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.278766 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-config-data\") pod \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") "
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.278909 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-sg-core-conf-yaml\") pod \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\" (UID: \"db5a28ec-6409-4676-b4d5-2dd89574c2a2\") "
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.281949 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "db5a28ec-6409-4676-b4d5-2dd89574c2a2" (UID: "db5a28ec-6409-4676-b4d5-2dd89574c2a2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.282307 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "db5a28ec-6409-4676-b4d5-2dd89574c2a2" (UID: "db5a28ec-6409-4676-b4d5-2dd89574c2a2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.288071 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-scripts" (OuterVolumeSpecName: "scripts") pod "db5a28ec-6409-4676-b4d5-2dd89574c2a2" (UID: "db5a28ec-6409-4676-b4d5-2dd89574c2a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.288184 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db5a28ec-6409-4676-b4d5-2dd89574c2a2-kube-api-access-5td2w" (OuterVolumeSpecName: "kube-api-access-5td2w") pod "db5a28ec-6409-4676-b4d5-2dd89574c2a2" (UID: "db5a28ec-6409-4676-b4d5-2dd89574c2a2"). InnerVolumeSpecName "kube-api-access-5td2w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.315843 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "db5a28ec-6409-4676-b4d5-2dd89574c2a2" (UID: "db5a28ec-6409-4676-b4d5-2dd89574c2a2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.372737 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db5a28ec-6409-4676-b4d5-2dd89574c2a2" (UID: "db5a28ec-6409-4676-b4d5-2dd89574c2a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.381538 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.381568 4816 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-scripts\") on node \"crc\" DevicePath \"\""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.381578 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5td2w\" (UniqueName: \"kubernetes.io/projected/db5a28ec-6409-4676-b4d5-2dd89574c2a2-kube-api-access-5td2w\") on node \"crc\" DevicePath \"\""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.381588 4816 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.381596 4816 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5a28ec-6409-4676-b4d5-2dd89574c2a2-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.381613 4816 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.418427 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-config-data" (OuterVolumeSpecName: "config-data") pod "db5a28ec-6409-4676-b4d5-2dd89574c2a2" (UID: "db5a28ec-6409-4676-b4d5-2dd89574c2a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.420916 4816 generic.go:334] "Generic (PLEG): container finished" podID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerID="f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646" exitCode=0
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.420961 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerDied","Data":"f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646"}
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.420993 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db5a28ec-6409-4676-b4d5-2dd89574c2a2","Type":"ContainerDied","Data":"67b2e6e2c85dca3d515af7e6dadc23db820e8e4b0ff651f38b261039f3214c90"}
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.421015 4816 scope.go:117] "RemoveContainer" containerID="70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.421168 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.454778 4816 scope.go:117] "RemoveContainer" containerID="e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.458116 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.474364 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.482716 4816 scope.go:117] "RemoveContainer" containerID="f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.484672 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5a28ec-6409-4676-b4d5-2dd89574c2a2-config-data\") on node \"crc\" DevicePath \"\""
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.492683 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.493205 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4805de8d-a273-4606-9758-3640f20946d5" containerName="collect-profiles"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493229 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4805de8d-a273-4606-9758-3640f20946d5" containerName="collect-profiles"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.493250 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerName="dnsmasq-dns"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493268 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerName="dnsmasq-dns"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.493295 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="proxy-httpd"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493303 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="proxy-httpd"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.493317 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-notification-agent"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493326 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-notification-agent"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.493345 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-central-agent"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493354 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-central-agent"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.493406 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerName="init"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493414 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerName="init"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.493429 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="sg-core"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493437 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="sg-core"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493690 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d9ee62f-4dcf-447b-a68b-aed832204fc8" containerName="dnsmasq-dns"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493712 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-notification-agent"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493727 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="sg-core"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493738 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4805de8d-a273-4606-9758-3640f20946d5" containerName="collect-profiles"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493750 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="proxy-httpd"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.493785 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" containerName="ceilometer-central-agent"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.496579 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.503460 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.503493 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.509480 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.517951 4816 scope.go:117] "RemoveContainer" containerID="7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.559447 4816 scope.go:117] "RemoveContainer" containerID="70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.559918 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb\": container with ID starting with 70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb not found: ID does not exist" containerID="70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb"
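
The alternating "RemoveStaleState: removing container" / "Deleted CPUSet assignment" pairs above are the resource managers dropping bookkeeping for pods that no longer exist, triggered when a new pod (ceilometer-0 here) is admitted. A simplified Go sketch of that bookkeeping; the types are stand-ins, not kubelet's cpu manager state:

// stale_state.go - drop state entries whose pod UID is no longer live.
package main

import "fmt"

type entry struct{ podUID, container string }

// container -> assigned cpuset, kept as a plain string in this sketch.
type state map[entry]string

func removeStale(s state, live map[string]bool) {
	for k := range s { // deleting during range is safe in Go
		if !live[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %s\n",
				k.container, k.podUID)
			delete(s, k) // the "Deleted CPUSet assignment" step
		}
	}
}

func main() {
	s := state{
		{"db5a28ec-6409-4676-b4d5-2dd89574c2a2", "sg-core"}: "0-3",
		{"720b8c85-0246-4259-9e80-35a4fd4c7242", "sg-core"}: "0-3",
	}
	live := map[string]bool{"720b8c85-0246-4259-9e80-35a4fd4c7242": true}
	removeStale(s, live)
	fmt.Println("remaining entries:", len(s)) // 1
}
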
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.559950 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb"} err="failed to get container status \"70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb\": rpc error: code = NotFound desc = could not find container \"70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb\": container with ID starting with 70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb not found: ID does not exist"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.559970 4816 scope.go:117] "RemoveContainer" containerID="e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.563194 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a\": container with ID starting with e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a not found: ID does not exist" containerID="e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.563245 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a"} err="failed to get container status \"e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a\": rpc error: code = NotFound desc = could not find container \"e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a\": container with ID starting with e316d70301d0958ebe18880fc602ca3e71b2e2167e19ae28f07f543e8bb6e39a not found: ID does not exist"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.563273 4816 scope.go:117] "RemoveContainer" containerID="f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.563760 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646\": container with ID starting with f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646 not found: ID does not exist" containerID="f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.563792 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646"} err="failed to get container status \"f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646\": rpc error: code = NotFound desc = could not find container \"f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646\": container with ID starting with f177cfcf2bdca835712e9c6f49c1dfa3c17134619d5d489c1f0b2070fdd2a646 not found: ID does not exist"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.563813 4816 scope.go:117] "RemoveContainer" containerID="7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0"
Feb 16 14:45:10 crc kubenswrapper[4816]: E0216 14:45:10.564153 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0\": container with ID starting with 7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0 not found: ID does not exist" containerID="7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0"
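
The RemoveContainer / "code = NotFound" pairs above record a benign race: the containers are already gone by the time the deletor asks the runtime for their status, and the errors are logged but harmless. A common pattern for keeping such deletion idempotent, sketched here with a stubbed runtime call rather than the real CRI client:

// remove_idempotent.go - treat gRPC NotFound as "already deleted".
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// containerStatus stands in for a CRI ContainerStatus RPC that fails
// exactly the way the log entries above show.
func containerStatus(id string) error {
	return status.Errorf(codes.NotFound,
		"could not find container %q: ID does not exist", id)
}

func removeContainer(id string) error {
	if err := containerStatus(id); err != nil {
		if status.Code(err) == codes.NotFound {
			return nil // already gone; nothing left to remove
		}
		return fmt.Errorf("failed to get container status %q: %w", id, err)
	}
	// ... actual removal would go here ...
	return nil
}

func main() {
	err := removeContainer("70b47f1350f73490f7a61b21b99a2142c817c0ef26830d98f58c1b4d295481cb")
	fmt.Println("remove returned:", err) // nil: NotFound treated as success
}
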
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.564178 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0"} err="failed to get container status \"7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0\": rpc error: code = NotFound desc = could not find container \"7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0\": container with ID starting with 7bc1ad9be04c781ea95a9ccfc2c4b30f20b60cf35d7fdb7b36f004cb57368ce0 not found: ID does not exist"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.586977 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/720b8c85-0246-4259-9e80-35a4fd4c7242-log-httpd\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.587081 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pprl\" (UniqueName: \"kubernetes.io/projected/720b8c85-0246-4259-9e80-35a4fd4c7242-kube-api-access-9pprl\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.587140 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-scripts\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.587169 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/720b8c85-0246-4259-9e80-35a4fd4c7242-run-httpd\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.587454 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-config-data\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.587761 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.587840 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.689406 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-config-data\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.689582 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.689681 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.689722 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/720b8c85-0246-4259-9e80-35a4fd4c7242-log-httpd\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.689746 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pprl\" (UniqueName: \"kubernetes.io/projected/720b8c85-0246-4259-9e80-35a4fd4c7242-kube-api-access-9pprl\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.689774 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-scripts\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.689794 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/720b8c85-0246-4259-9e80-35a4fd4c7242-run-httpd\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.690384 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/720b8c85-0246-4259-9e80-35a4fd4c7242-run-httpd\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.690575 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/720b8c85-0246-4259-9e80-35a4fd4c7242-log-httpd\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.693951 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-scripts\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.694857 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-config-data\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.696533 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.697257 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/720b8c85-0246-4259-9e80-35a4fd4c7242-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.713366 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pprl\" (UniqueName: \"kubernetes.io/projected/720b8c85-0246-4259-9e80-35a4fd4c7242-kube-api-access-9pprl\") pod \"ceilometer-0\" (UID: \"720b8c85-0246-4259-9e80-35a4fd4c7242\") " pod="openstack/ceilometer-0"
Feb 16 14:45:10 crc kubenswrapper[4816]: I0216 14:45:10.820893 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 16 14:45:11 crc kubenswrapper[4816]: I0216 14:45:11.370054 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 16 14:45:11 crc kubenswrapper[4816]: W0216 14:45:11.371593 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod720b8c85_0246_4259_9e80_35a4fd4c7242.slice/crio-b295790390ea5e2f0ee1d36aed3f12374826b089d702cf3a882ff9cec84fd9b2 WatchSource:0}: Error finding container b295790390ea5e2f0ee1d36aed3f12374826b089d702cf3a882ff9cec84fd9b2: Status 404 returned error can't find the container with id b295790390ea5e2f0ee1d36aed3f12374826b089d702cf3a882ff9cec84fd9b2
Feb 16 14:45:11 crc kubenswrapper[4816]: I0216 14:45:11.421233 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1390b28a-5190-432a-826c-66483fceb03c" path="/var/lib/kubelet/pods/1390b28a-5190-432a-826c-66483fceb03c/volumes"
Feb 16 14:45:11 crc kubenswrapper[4816]: I0216 14:45:11.422167 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65f10b42-4a5e-4960-8890-1b623275accf" path="/var/lib/kubelet/pods/65f10b42-4a5e-4960-8890-1b623275accf/volumes"
Feb 16 14:45:11 crc kubenswrapper[4816]: I0216 14:45:11.423338 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db5a28ec-6409-4676-b4d5-2dd89574c2a2" path="/var/lib/kubelet/pods/db5a28ec-6409-4676-b4d5-2dd89574c2a2/volumes"
Feb 16 14:45:11 crc kubenswrapper[4816]: I0216 14:45:11.450260 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"720b8c85-0246-4259-9e80-35a4fd4c7242","Type":"ContainerStarted","Data":"b295790390ea5e2f0ee1d36aed3f12374826b089d702cf3a882ff9cec84fd9b2"}
Feb 16 14:45:12 crc kubenswrapper[4816]: I0216 14:45:12.466101 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"720b8c85-0246-4259-9e80-35a4fd4c7242","Type":"ContainerStarted","Data":"17d388f2b72701fd0684288d2dd81c6c16504df3e1ee5c2f07256c875b236ba8"}
Feb 16 14:45:13 crc kubenswrapper[4816]: I0216 14:45:13.476907 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"720b8c85-0246-4259-9e80-35a4fd4c7242","Type":"ContainerStarted","Data":"f55a4e863f6098d57d1ffcd28f71709ef6f70e95e4294bd99816beae1809bc42"}
Feb 16 14:45:14 crc kubenswrapper[4816]: I0216 14:45:14.487056 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"720b8c85-0246-4259-9e80-35a4fd4c7242","Type":"ContainerStarted","Data":"8cf240330face1a00add4383415a2ff93ada1c50ebd27b999954fe1d795bf51b"}
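
The "Cleaned up orphaned pod volumes dir" entries above are kubelet housekeeping: per-UID directories under /var/lib/kubelet/pods whose pod no longer exists get their volumes directory removed. A sketch of that scan, with the paths and activePods set illustrative only:

// orphan_volumes.go - remove the volumes dir of any pod UID that is no
// longer active. Real kubelet only does this once every volume is
// unmounted; this sketch skips that safety check.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cleanupOrphans(podsRoot string, active map[string]bool) error {
	entries, err := os.ReadDir(podsRoot)
	if err != nil {
		return err
	}
	for _, e := range entries {
		if !e.IsDir() || active[e.Name()] { // directory names are pod UIDs
			continue
		}
		volumes := filepath.Join(podsRoot, e.Name(), "volumes")
		if _, err := os.Stat(volumes); err != nil {
			continue // no volumes dir left; nothing to clean
		}
		if err := os.RemoveAll(volumes); err != nil {
			return err
		}
		fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n",
			e.Name(), volumes)
	}
	return nil
}

func main() {
	active := map[string]bool{"720b8c85-0246-4259-9e80-35a4fd4c7242": true}
	if err := cleanupOrphans("/var/lib/kubelet/pods", active); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
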
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Feb 16 14:45:16 crc kubenswrapper[4816]: I0216 14:45:16.508910 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"720b8c85-0246-4259-9e80-35a4fd4c7242","Type":"ContainerStarted","Data":"3d21da63b903e2c1c791f1144c071d500be1080bbf174cd7d414ce1213f990bc"} Feb 16 14:45:16 crc kubenswrapper[4816]: I0216 14:45:16.509117 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 16 14:45:16 crc kubenswrapper[4816]: I0216 14:45:16.549838 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.441656439 podStartE2EDuration="6.549817214s" podCreationTimestamp="2026-02-16 14:45:10 +0000 UTC" firstStartedPulling="2026-02-16 14:45:11.378464671 +0000 UTC m=+6110.705178399" lastFinishedPulling="2026-02-16 14:45:15.486625446 +0000 UTC m=+6114.813339174" observedRunningTime="2026-02-16 14:45:16.540141851 +0000 UTC m=+6115.866855589" watchObservedRunningTime="2026-02-16 14:45:16.549817214 +0000 UTC m=+6115.876530942" Feb 16 14:45:16 crc kubenswrapper[4816]: I0216 14:45:16.613069 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Feb 16 14:45:16 crc kubenswrapper[4816]: I0216 14:45:16.732572 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Feb 16 14:45:18 crc kubenswrapper[4816]: I0216 14:45:18.041764 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-mltdx"] Feb 16 14:45:18 crc kubenswrapper[4816]: I0216 14:45:18.051306 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-mltdx"] Feb 16 14:45:18 crc kubenswrapper[4816]: I0216 14:45:18.399635 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:45:18 crc kubenswrapper[4816]: E0216 14:45:18.400232 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:45:19 crc kubenswrapper[4816]: I0216 14:45:19.410594 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11d5e04a-cb16-4c72-b4dc-0f00875aff0f" path="/var/lib/kubelet/pods/11d5e04a-cb16-4c72-b4dc-0f00875aff0f/volumes" Feb 16 14:45:20 crc kubenswrapper[4816]: E0216 14:45:20.489577 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd7e6cd_5936_4e4b_a11e_42b171c5b367.slice\": RecentStats: unable to find data in memory cache]" Feb 16 14:45:29 crc kubenswrapper[4816]: I0216 14:45:29.399751 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:45:29 crc kubenswrapper[4816]: E0216 14:45:29.401054 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:45:30 crc kubenswrapper[4816]: I0216 14:45:30.550039 4816 scope.go:117] "RemoveContainer" containerID="f84881e3e26461693a7c5e674f08b3d838351b759793b98bb6e3970c8b9ee75f" Feb 16 14:45:30 crc kubenswrapper[4816]: I0216 14:45:30.581896 4816 scope.go:117] "RemoveContainer" containerID="4965f5cca73baaeff91e862c10d664bc2ab212deaa5d4a969165258976ec3ca3" Feb 16 14:45:30 crc kubenswrapper[4816]: I0216 14:45:30.644193 4816 scope.go:117] "RemoveContainer" containerID="87e9679b1f6263fb3e21b272f03939b5b7d49f31e9febe687650d8061b82d225" Feb 16 14:45:30 crc kubenswrapper[4816]: I0216 14:45:30.701368 4816 scope.go:117] "RemoveContainer" containerID="3d2aca137654cf47646b404af68c7e5bb03a93e9985f0b52a42c885b98fc1c31" Feb 16 14:45:30 crc kubenswrapper[4816]: I0216 14:45:30.738689 4816 scope.go:117] "RemoveContainer" containerID="c21b0c7e49dfad3a38e053c7e2af0473d7dfe30c0569f7d62d82f5b7342f6f96" Feb 16 14:45:30 crc kubenswrapper[4816]: I0216 14:45:30.784122 4816 scope.go:117] "RemoveContainer" containerID="9dea1ed72e3c2242512c437d988880780b2d245ef9de47d5bffcd43c40d84393" Feb 16 14:45:30 crc kubenswrapper[4816]: I0216 14:45:30.842932 4816 scope.go:117] "RemoveContainer" containerID="9451f71bb2c41a91e3079046ffd44d9e7367e164e313fbe5e0aa1b062154868e" Feb 16 14:45:40 crc kubenswrapper[4816]: I0216 14:45:40.399498 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:45:40 crc kubenswrapper[4816]: E0216 14:45:40.400548 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:45:40 crc kubenswrapper[4816]: I0216 14:45:40.827707 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 16 14:45:55 crc kubenswrapper[4816]: I0216 14:45:55.403517 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:45:55 crc kubenswrapper[4816]: E0216 14:45:55.404546 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.133748 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d4bb58b95-nhbdm"] Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.137524 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.139547 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.152402 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d4bb58b95-nhbdm"] Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.280199 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-sb\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.280582 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-openstack-cell1\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.280687 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-nb\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.280724 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x955l\" (UniqueName: \"kubernetes.io/projected/5a619792-f3a4-47d8-9f52-0f5948042b9d-kube-api-access-x955l\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.280787 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-config\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.280844 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-dns-svc\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.382789 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-dns-svc\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.382870 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-sb\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " 
pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.382907 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-openstack-cell1\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.382992 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-nb\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.383025 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x955l\" (UniqueName: \"kubernetes.io/projected/5a619792-f3a4-47d8-9f52-0f5948042b9d-kube-api-access-x955l\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.383081 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-config\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.383775 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-dns-svc\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.384138 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-config\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.384442 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-openstack-cell1\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.384946 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-nb\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.385227 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-sb\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.405842 4816 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x955l\" (UniqueName: \"kubernetes.io/projected/5a619792-f3a4-47d8-9f52-0f5948042b9d-kube-api-access-x955l\") pod \"dnsmasq-dns-6d4bb58b95-nhbdm\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.464394 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:45:58 crc kubenswrapper[4816]: I0216 14:45:58.973602 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d4bb58b95-nhbdm"] Feb 16 14:45:59 crc kubenswrapper[4816]: I0216 14:45:59.976727 4816 generic.go:334] "Generic (PLEG): container finished" podID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerID="fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73" exitCode=0 Feb 16 14:45:59 crc kubenswrapper[4816]: I0216 14:45:59.976949 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" event={"ID":"5a619792-f3a4-47d8-9f52-0f5948042b9d","Type":"ContainerDied","Data":"fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73"} Feb 16 14:45:59 crc kubenswrapper[4816]: I0216 14:45:59.977224 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" event={"ID":"5a619792-f3a4-47d8-9f52-0f5948042b9d","Type":"ContainerStarted","Data":"dac630f177f6bb5bf97ba8aba9ef7f749af8f718ddf23ebfad6a0c401888016a"} Feb 16 14:46:00 crc kubenswrapper[4816]: I0216 14:46:00.987354 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" event={"ID":"5a619792-f3a4-47d8-9f52-0f5948042b9d","Type":"ContainerStarted","Data":"5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c"} Feb 16 14:46:00 crc kubenswrapper[4816]: I0216 14:46:00.987775 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:46:01 crc kubenswrapper[4816]: I0216 14:46:01.020135 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" podStartSLOduration=3.020112849 podStartE2EDuration="3.020112849s" podCreationTimestamp="2026-02-16 14:45:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:46:01.013684533 +0000 UTC m=+6160.340398271" watchObservedRunningTime="2026-02-16 14:46:01.020112849 +0000 UTC m=+6160.346826587" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.465838 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.531698 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-866d8d774c-svqvq"] Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.534344 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" podUID="de61dafb-9371-4968-8c26-702d7b9e6be6" containerName="dnsmasq-dns" containerID="cri-o://c899cca45673f1401834bf58e18a209390d0f44369be07397582122dd3ed894c" gracePeriod=10 Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.776449 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5644db77f7-8gd7t"] Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.778546 4816 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.802375 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5644db77f7-8gd7t"] Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.941940 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-openstack-cell1\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.942234 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-dns-svc\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.942328 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njbqp\" (UniqueName: \"kubernetes.io/projected/4c717d50-6be2-467f-b637-873c00617e0e-kube-api-access-njbqp\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.942430 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-ovsdbserver-sb\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.942548 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-ovsdbserver-nb\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:08 crc kubenswrapper[4816]: I0216 14:46:08.942869 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-config\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.045299 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-openstack-cell1\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.045365 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-dns-svc\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.045402 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njbqp\" (UniqueName: \"kubernetes.io/projected/4c717d50-6be2-467f-b637-873c00617e0e-kube-api-access-njbqp\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.045442 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-ovsdbserver-sb\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.045493 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-ovsdbserver-nb\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.045527 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-config\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.046483 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-config\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.046848 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-openstack-cell1\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.047679 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-ovsdbserver-nb\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.047984 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-ovsdbserver-sb\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.048942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c717d50-6be2-467f-b637-873c00617e0e-dns-svc\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.067610 4816 generic.go:334] "Generic (PLEG): container finished" podID="de61dafb-9371-4968-8c26-702d7b9e6be6" 
containerID="c899cca45673f1401834bf58e18a209390d0f44369be07397582122dd3ed894c" exitCode=0 Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.067675 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" event={"ID":"de61dafb-9371-4968-8c26-702d7b9e6be6","Type":"ContainerDied","Data":"c899cca45673f1401834bf58e18a209390d0f44369be07397582122dd3ed894c"} Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.077881 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njbqp\" (UniqueName: \"kubernetes.io/projected/4c717d50-6be2-467f-b637-873c00617e0e-kube-api-access-njbqp\") pod \"dnsmasq-dns-5644db77f7-8gd7t\" (UID: \"4c717d50-6be2-467f-b637-873c00617e0e\") " pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.123435 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.327611 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.402941 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:46:09 crc kubenswrapper[4816]: E0216 14:46:09.403278 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.456585 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nr97\" (UniqueName: \"kubernetes.io/projected/de61dafb-9371-4968-8c26-702d7b9e6be6-kube-api-access-8nr97\") pod \"de61dafb-9371-4968-8c26-702d7b9e6be6\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.456871 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-config\") pod \"de61dafb-9371-4968-8c26-702d7b9e6be6\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.456936 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-nb\") pod \"de61dafb-9371-4968-8c26-702d7b9e6be6\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.456979 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-dns-svc\") pod \"de61dafb-9371-4968-8c26-702d7b9e6be6\" (UID: \"de61dafb-9371-4968-8c26-702d7b9e6be6\") " Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.461183 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-sb\") pod \"de61dafb-9371-4968-8c26-702d7b9e6be6\" (UID: 
\"de61dafb-9371-4968-8c26-702d7b9e6be6\") " Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.461999 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de61dafb-9371-4968-8c26-702d7b9e6be6-kube-api-access-8nr97" (OuterVolumeSpecName: "kube-api-access-8nr97") pod "de61dafb-9371-4968-8c26-702d7b9e6be6" (UID: "de61dafb-9371-4968-8c26-702d7b9e6be6"). InnerVolumeSpecName "kube-api-access-8nr97". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.462316 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nr97\" (UniqueName: \"kubernetes.io/projected/de61dafb-9371-4968-8c26-702d7b9e6be6-kube-api-access-8nr97\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.507579 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-config" (OuterVolumeSpecName: "config") pod "de61dafb-9371-4968-8c26-702d7b9e6be6" (UID: "de61dafb-9371-4968-8c26-702d7b9e6be6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.512309 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "de61dafb-9371-4968-8c26-702d7b9e6be6" (UID: "de61dafb-9371-4968-8c26-702d7b9e6be6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.517450 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "de61dafb-9371-4968-8c26-702d7b9e6be6" (UID: "de61dafb-9371-4968-8c26-702d7b9e6be6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.522246 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "de61dafb-9371-4968-8c26-702d7b9e6be6" (UID: "de61dafb-9371-4968-8c26-702d7b9e6be6"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.564637 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.564689 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.564699 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.564708 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de61dafb-9371-4968-8c26-702d7b9e6be6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:09 crc kubenswrapper[4816]: I0216 14:46:09.605341 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5644db77f7-8gd7t"] Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.087143 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" event={"ID":"de61dafb-9371-4968-8c26-702d7b9e6be6","Type":"ContainerDied","Data":"e2940aac2a1b589b6f7814fd2e7d147f96234c503be1bbd255040f4ad4983cdb"} Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.087481 4816 scope.go:117] "RemoveContainer" containerID="c899cca45673f1401834bf58e18a209390d0f44369be07397582122dd3ed894c" Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.087159 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-866d8d774c-svqvq" Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.089068 4816 generic.go:334] "Generic (PLEG): container finished" podID="4c717d50-6be2-467f-b637-873c00617e0e" containerID="67361a208f3be502f40fc80f6379eea41d7a3a6320417ba35d966a2deeb8056f" exitCode=0 Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.089132 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" event={"ID":"4c717d50-6be2-467f-b637-873c00617e0e","Type":"ContainerDied","Data":"67361a208f3be502f40fc80f6379eea41d7a3a6320417ba35d966a2deeb8056f"} Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.089174 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" event={"ID":"4c717d50-6be2-467f-b637-873c00617e0e","Type":"ContainerStarted","Data":"d9a54bc2cbb9e132af842fd3a7387675167fd1a35cd3f236a79f9305a348eedd"} Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.111604 4816 scope.go:117] "RemoveContainer" containerID="87ea851028b71de5b61655b0b79c23d02fc959143114ca9e62f9ab35b8cdec87" Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.318271 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-866d8d774c-svqvq"] Feb 16 14:46:10 crc kubenswrapper[4816]: I0216 14:46:10.331766 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-866d8d774c-svqvq"] Feb 16 14:46:11 crc kubenswrapper[4816]: I0216 14:46:11.102872 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" event={"ID":"4c717d50-6be2-467f-b637-873c00617e0e","Type":"ContainerStarted","Data":"515111db4a3f382e827b4aa22692adab3457d707ba05f1bbd5182eb54dd4f516"} Feb 16 14:46:11 crc kubenswrapper[4816]: I0216 14:46:11.103042 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:11 crc kubenswrapper[4816]: I0216 14:46:11.140854 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" podStartSLOduration=3.14083617 podStartE2EDuration="3.14083617s" podCreationTimestamp="2026-02-16 14:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 14:46:11.120526857 +0000 UTC m=+6170.447240575" watchObservedRunningTime="2026-02-16 14:46:11.14083617 +0000 UTC m=+6170.467549898" Feb 16 14:46:11 crc kubenswrapper[4816]: I0216 14:46:11.414818 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de61dafb-9371-4968-8c26-702d7b9e6be6" path="/var/lib/kubelet/pods/de61dafb-9371-4968-8c26-702d7b9e6be6/volumes" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.124889 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5644db77f7-8gd7t" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.199032 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d4bb58b95-nhbdm"] Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.199328 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" podUID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerName="dnsmasq-dns" containerID="cri-o://5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c" gracePeriod=10 Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.724197 4816 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.836016 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-nb\") pod \"5a619792-f3a4-47d8-9f52-0f5948042b9d\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.836155 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-openstack-cell1\") pod \"5a619792-f3a4-47d8-9f52-0f5948042b9d\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.836217 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-dns-svc\") pod \"5a619792-f3a4-47d8-9f52-0f5948042b9d\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.836254 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-config\") pod \"5a619792-f3a4-47d8-9f52-0f5948042b9d\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.836403 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-sb\") pod \"5a619792-f3a4-47d8-9f52-0f5948042b9d\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.836432 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x955l\" (UniqueName: \"kubernetes.io/projected/5a619792-f3a4-47d8-9f52-0f5948042b9d-kube-api-access-x955l\") pod \"5a619792-f3a4-47d8-9f52-0f5948042b9d\" (UID: \"5a619792-f3a4-47d8-9f52-0f5948042b9d\") " Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.842061 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a619792-f3a4-47d8-9f52-0f5948042b9d-kube-api-access-x955l" (OuterVolumeSpecName: "kube-api-access-x955l") pod "5a619792-f3a4-47d8-9f52-0f5948042b9d" (UID: "5a619792-f3a4-47d8-9f52-0f5948042b9d"). InnerVolumeSpecName "kube-api-access-x955l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.896942 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "5a619792-f3a4-47d8-9f52-0f5948042b9d" (UID: "5a619792-f3a4-47d8-9f52-0f5948042b9d"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.901385 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5a619792-f3a4-47d8-9f52-0f5948042b9d" (UID: "5a619792-f3a4-47d8-9f52-0f5948042b9d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.904850 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5a619792-f3a4-47d8-9f52-0f5948042b9d" (UID: "5a619792-f3a4-47d8-9f52-0f5948042b9d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.908895 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5a619792-f3a4-47d8-9f52-0f5948042b9d" (UID: "5a619792-f3a4-47d8-9f52-0f5948042b9d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.916878 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-config" (OuterVolumeSpecName: "config") pod "5a619792-f3a4-47d8-9f52-0f5948042b9d" (UID: "5a619792-f3a4-47d8-9f52-0f5948042b9d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.939586 4816 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.939629 4816 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-config\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.939639 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.939670 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x955l\" (UniqueName: \"kubernetes.io/projected/5a619792-f3a4-47d8-9f52-0f5948042b9d-kube-api-access-x955l\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.939680 4816 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:19 crc kubenswrapper[4816]: I0216 14:46:19.939690 4816 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5a619792-f3a4-47d8-9f52-0f5948042b9d-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.189229 4816 generic.go:334] "Generic (PLEG): container finished" podID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerID="5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c" exitCode=0 Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.189270 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" event={"ID":"5a619792-f3a4-47d8-9f52-0f5948042b9d","Type":"ContainerDied","Data":"5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c"} Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 
14:46:20.189297 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" event={"ID":"5a619792-f3a4-47d8-9f52-0f5948042b9d","Type":"ContainerDied","Data":"dac630f177f6bb5bf97ba8aba9ef7f749af8f718ddf23ebfad6a0c401888016a"} Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.189315 4816 scope.go:117] "RemoveContainer" containerID="5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.189311 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d4bb58b95-nhbdm" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.217036 4816 scope.go:117] "RemoveContainer" containerID="fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.241389 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d4bb58b95-nhbdm"] Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.246400 4816 scope.go:117] "RemoveContainer" containerID="5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c" Feb 16 14:46:20 crc kubenswrapper[4816]: E0216 14:46:20.247351 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c\": container with ID starting with 5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c not found: ID does not exist" containerID="5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.247409 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c"} err="failed to get container status \"5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c\": rpc error: code = NotFound desc = could not find container \"5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c\": container with ID starting with 5c1483553b90c63691dd0eed91cbeb1c0744f4ce020efec19faa033ab8e3da3c not found: ID does not exist" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.247442 4816 scope.go:117] "RemoveContainer" containerID="fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73" Feb 16 14:46:20 crc kubenswrapper[4816]: E0216 14:46:20.248076 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73\": container with ID starting with fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73 not found: ID does not exist" containerID="fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.248149 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73"} err="failed to get container status \"fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73\": rpc error: code = NotFound desc = could not find container \"fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73\": container with ID starting with fa99eac50ede12e73054ee60d4b90063b3445cbe2ee9253affc1ad5934785c73 not found: ID does not exist" Feb 16 14:46:20 crc kubenswrapper[4816]: I0216 14:46:20.254852 4816 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/dnsmasq-dns-6d4bb58b95-nhbdm"] Feb 16 14:46:21 crc kubenswrapper[4816]: I0216 14:46:21.419310 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a619792-f3a4-47d8-9f52-0f5948042b9d" path="/var/lib/kubelet/pods/5a619792-f3a4-47d8-9f52-0f5948042b9d/volumes" Feb 16 14:46:24 crc kubenswrapper[4816]: I0216 14:46:24.399380 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:46:24 crc kubenswrapper[4816]: E0216 14:46:24.400391 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.874889 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp"] Feb 16 14:46:29 crc kubenswrapper[4816]: E0216 14:46:29.877228 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de61dafb-9371-4968-8c26-702d7b9e6be6" containerName="init" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.877326 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="de61dafb-9371-4968-8c26-702d7b9e6be6" containerName="init" Feb 16 14:46:29 crc kubenswrapper[4816]: E0216 14:46:29.877416 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerName="dnsmasq-dns" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.877492 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerName="dnsmasq-dns" Feb 16 14:46:29 crc kubenswrapper[4816]: E0216 14:46:29.877605 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerName="init" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.877700 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerName="init" Feb 16 14:46:29 crc kubenswrapper[4816]: E0216 14:46:29.877778 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de61dafb-9371-4968-8c26-702d7b9e6be6" containerName="dnsmasq-dns" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.877837 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="de61dafb-9371-4968-8c26-702d7b9e6be6" containerName="dnsmasq-dns" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.878238 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="de61dafb-9371-4968-8c26-702d7b9e6be6" containerName="dnsmasq-dns" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.878348 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a619792-f3a4-47d8-9f52-0f5948042b9d" containerName="dnsmasq-dns" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.879389 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.881876 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.881965 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.883379 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.893201 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.899270 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp"] Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.952929 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.953020 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.953076 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lw7s\" (UniqueName: \"kubernetes.io/projected/37cb37ad-8a44-4fad-bee1-b2fad177b667-kube-api-access-6lw7s\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.953148 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:29 crc kubenswrapper[4816]: I0216 14:46:29.953203 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.056164 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.056458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.056569 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lw7s\" (UniqueName: \"kubernetes.io/projected/37cb37ad-8a44-4fad-bee1-b2fad177b667-kube-api-access-6lw7s\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.056799 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.056948 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.066891 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.067098 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.067855 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: 
\"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.068010 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.087735 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lw7s\" (UniqueName: \"kubernetes.io/projected/37cb37ad-8a44-4fad-bee1-b2fad177b667-kube-api-access-6lw7s\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.197070 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.936518 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp"] Feb 16 14:46:30 crc kubenswrapper[4816]: I0216 14:46:30.985117 4816 scope.go:117] "RemoveContainer" containerID="0de02733857398068aa3c24fa496511019506eedf3b9f229d6bf0d8d6619eff0" Feb 16 14:46:31 crc kubenswrapper[4816]: I0216 14:46:31.017615 4816 scope.go:117] "RemoveContainer" containerID="9ddf753d75606d9d7f73cfc80af627b5bb57d147537106dfcf13e0f7b9ea0861" Feb 16 14:46:31 crc kubenswrapper[4816]: I0216 14:46:31.320558 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" event={"ID":"37cb37ad-8a44-4fad-bee1-b2fad177b667","Type":"ContainerStarted","Data":"1035ea0214c935c5fa6399924b22154fddf0ce0caa298f9875749a70e9ffad9d"} Feb 16 14:46:35 crc kubenswrapper[4816]: I0216 14:46:35.441434 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:46:35 crc kubenswrapper[4816]: E0216 14:46:35.442104 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:46:43 crc kubenswrapper[4816]: I0216 14:46:43.045785 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" event={"ID":"37cb37ad-8a44-4fad-bee1-b2fad177b667","Type":"ContainerStarted","Data":"b2f8dec9624887a00e2fd3948bc60c08b5df4616e13a2edec6a49f92ad673290"} Feb 16 14:46:43 crc kubenswrapper[4816]: I0216 14:46:43.065228 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" podStartSLOduration=2.397491044 podStartE2EDuration="14.065197638s" podCreationTimestamp="2026-02-16 14:46:29 +0000 UTC" firstStartedPulling="2026-02-16 14:46:30.937863441 
+0000 UTC m=+6190.264577169" lastFinishedPulling="2026-02-16 14:46:42.605570035 +0000 UTC m=+6201.932283763" observedRunningTime="2026-02-16 14:46:43.060831299 +0000 UTC m=+6202.387545027" watchObservedRunningTime="2026-02-16 14:46:43.065197638 +0000 UTC m=+6202.391911366" Feb 16 14:46:49 crc kubenswrapper[4816]: I0216 14:46:49.399411 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:46:49 crc kubenswrapper[4816]: E0216 14:46:49.400648 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:46:56 crc kubenswrapper[4816]: I0216 14:46:56.173609 4816 generic.go:334] "Generic (PLEG): container finished" podID="37cb37ad-8a44-4fad-bee1-b2fad177b667" containerID="b2f8dec9624887a00e2fd3948bc60c08b5df4616e13a2edec6a49f92ad673290" exitCode=0 Feb 16 14:46:56 crc kubenswrapper[4816]: I0216 14:46:56.173685 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" event={"ID":"37cb37ad-8a44-4fad-bee1-b2fad177b667","Type":"ContainerDied","Data":"b2f8dec9624887a00e2fd3948bc60c08b5df4616e13a2edec6a49f92ad673290"} Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.647620 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.796199 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lw7s\" (UniqueName: \"kubernetes.io/projected/37cb37ad-8a44-4fad-bee1-b2fad177b667-kube-api-access-6lw7s\") pod \"37cb37ad-8a44-4fad-bee1-b2fad177b667\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.796326 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ssh-key-openstack-cell1\") pod \"37cb37ad-8a44-4fad-bee1-b2fad177b667\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.796425 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ceph\") pod \"37cb37ad-8a44-4fad-bee1-b2fad177b667\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.796572 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-inventory\") pod \"37cb37ad-8a44-4fad-bee1-b2fad177b667\" (UID: \"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.796647 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-pre-adoption-validation-combined-ca-bundle\") pod \"37cb37ad-8a44-4fad-bee1-b2fad177b667\" (UID: 
\"37cb37ad-8a44-4fad-bee1-b2fad177b667\") " Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.802355 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37cb37ad-8a44-4fad-bee1-b2fad177b667-kube-api-access-6lw7s" (OuterVolumeSpecName: "kube-api-access-6lw7s") pod "37cb37ad-8a44-4fad-bee1-b2fad177b667" (UID: "37cb37ad-8a44-4fad-bee1-b2fad177b667"). InnerVolumeSpecName "kube-api-access-6lw7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.802625 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ceph" (OuterVolumeSpecName: "ceph") pod "37cb37ad-8a44-4fad-bee1-b2fad177b667" (UID: "37cb37ad-8a44-4fad-bee1-b2fad177b667"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.805802 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "37cb37ad-8a44-4fad-bee1-b2fad177b667" (UID: "37cb37ad-8a44-4fad-bee1-b2fad177b667"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.826138 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-inventory" (OuterVolumeSpecName: "inventory") pod "37cb37ad-8a44-4fad-bee1-b2fad177b667" (UID: "37cb37ad-8a44-4fad-bee1-b2fad177b667"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.829701 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "37cb37ad-8a44-4fad-bee1-b2fad177b667" (UID: "37cb37ad-8a44-4fad-bee1-b2fad177b667"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.899241 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.899272 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.899283 4816 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.899295 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lw7s\" (UniqueName: \"kubernetes.io/projected/37cb37ad-8a44-4fad-bee1-b2fad177b667-kube-api-access-6lw7s\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:57 crc kubenswrapper[4816]: I0216 14:46:57.899306 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/37cb37ad-8a44-4fad-bee1-b2fad177b667-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 14:46:58 crc kubenswrapper[4816]: I0216 14:46:58.195276 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" event={"ID":"37cb37ad-8a44-4fad-bee1-b2fad177b667","Type":"ContainerDied","Data":"1035ea0214c935c5fa6399924b22154fddf0ce0caa298f9875749a70e9ffad9d"} Feb 16 14:46:58 crc kubenswrapper[4816]: I0216 14:46:58.195340 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1035ea0214c935c5fa6399924b22154fddf0ce0caa298f9875749a70e9ffad9d" Feb 16 14:46:58 crc kubenswrapper[4816]: I0216 14:46:58.195368 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.230127 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4"] Feb 16 14:47:02 crc kubenswrapper[4816]: E0216 14:47:02.231252 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37cb37ad-8a44-4fad-bee1-b2fad177b667" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.231287 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="37cb37ad-8a44-4fad-bee1-b2fad177b667" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.231631 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="37cb37ad-8a44-4fad-bee1-b2fad177b667" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.232701 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.235806 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.236187 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.236246 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.236187 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.244756 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4"] Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.398341 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.398467 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.398548 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsz4p\" (UniqueName: \"kubernetes.io/projected/34f724c7-a493-4b35-8d7a-ae2ebb52353d-kube-api-access-fsz4p\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.399235 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.399398 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: E0216 14:47:02.399455 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.399464 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.501401 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.501455 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.501491 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsz4p\" (UniqueName: \"kubernetes.io/projected/34f724c7-a493-4b35-8d7a-ae2ebb52353d-kube-api-access-fsz4p\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.501628 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.501699 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.507587 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.508203 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 
14:47:02.508613 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.513428 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.519280 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsz4p\" (UniqueName: \"kubernetes.io/projected/34f724c7-a493-4b35-8d7a-ae2ebb52353d-kube-api-access-fsz4p\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:02 crc kubenswrapper[4816]: I0216 14:47:02.566850 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:47:03 crc kubenswrapper[4816]: I0216 14:47:03.110428 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:47:03 crc kubenswrapper[4816]: I0216 14:47:03.114094 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4"] Feb 16 14:47:03 crc kubenswrapper[4816]: I0216 14:47:03.256952 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" event={"ID":"34f724c7-a493-4b35-8d7a-ae2ebb52353d","Type":"ContainerStarted","Data":"e3c57ef1338abf8e5ec134da0729e5b7ce4059f020fa5ad2a438f4fffbccda3b"} Feb 16 14:47:04 crc kubenswrapper[4816]: I0216 14:47:04.268519 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" event={"ID":"34f724c7-a493-4b35-8d7a-ae2ebb52353d","Type":"ContainerStarted","Data":"3ef92a8b08d8d34159ccde856c2780dbb281528258a69704c97cdca800f4dbdd"} Feb 16 14:47:04 crc kubenswrapper[4816]: I0216 14:47:04.300612 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" podStartSLOduration=1.7803657720000001 podStartE2EDuration="2.300572206s" podCreationTimestamp="2026-02-16 14:47:02 +0000 UTC" firstStartedPulling="2026-02-16 14:47:03.110196679 +0000 UTC m=+6222.436910407" lastFinishedPulling="2026-02-16 14:47:03.630403113 +0000 UTC m=+6222.957116841" observedRunningTime="2026-02-16 14:47:04.293428051 +0000 UTC m=+6223.620141789" watchObservedRunningTime="2026-02-16 14:47:04.300572206 +0000 UTC m=+6223.627285934" Feb 16 14:47:16 crc kubenswrapper[4816]: I0216 14:47:16.398403 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:47:16 crc kubenswrapper[4816]: E0216 14:47:16.399159 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:47:27 crc kubenswrapper[4816]: I0216 14:47:27.400025 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:47:27 crc kubenswrapper[4816]: E0216 14:47:27.401457 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:47:38 crc kubenswrapper[4816]: I0216 14:47:38.398958 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:47:38 crc kubenswrapper[4816]: E0216 14:47:38.400335 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:47:53 crc kubenswrapper[4816]: I0216 14:47:53.399379 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:47:53 crc kubenswrapper[4816]: E0216 14:47:53.400385 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:48:04 crc kubenswrapper[4816]: I0216 14:48:04.398739 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:48:04 crc kubenswrapper[4816]: E0216 14:48:04.399439 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.302863 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jqcf6"] Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.306249 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.314365 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jqcf6"] Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.476640 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-catalog-content\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.476727 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-657mt\" (UniqueName: \"kubernetes.io/projected/12371735-159b-47c0-be90-b4455c62f136-kube-api-access-657mt\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.476757 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-utilities\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.579267 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-catalog-content\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.579334 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-657mt\" (UniqueName: \"kubernetes.io/projected/12371735-159b-47c0-be90-b4455c62f136-kube-api-access-657mt\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.579377 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-utilities\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.579906 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-catalog-content\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.580117 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-utilities\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.598627 4816 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-657mt\" (UniqueName: \"kubernetes.io/projected/12371735-159b-47c0-be90-b4455c62f136-kube-api-access-657mt\") pod \"redhat-marketplace-jqcf6\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:06 crc kubenswrapper[4816]: I0216 14:48:06.626675 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:07 crc kubenswrapper[4816]: I0216 14:48:07.096709 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jqcf6"] Feb 16 14:48:08 crc kubenswrapper[4816]: I0216 14:48:08.022793 4816 generic.go:334] "Generic (PLEG): container finished" podID="12371735-159b-47c0-be90-b4455c62f136" containerID="5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a" exitCode=0 Feb 16 14:48:08 crc kubenswrapper[4816]: I0216 14:48:08.023368 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jqcf6" event={"ID":"12371735-159b-47c0-be90-b4455c62f136","Type":"ContainerDied","Data":"5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a"} Feb 16 14:48:08 crc kubenswrapper[4816]: I0216 14:48:08.023636 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jqcf6" event={"ID":"12371735-159b-47c0-be90-b4455c62f136","Type":"ContainerStarted","Data":"4db7466902a0c80b3d48f3755332f45bae4756659dff03498631ac5a0a5bb66c"} Feb 16 14:48:10 crc kubenswrapper[4816]: I0216 14:48:10.047894 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jqcf6" event={"ID":"12371735-159b-47c0-be90-b4455c62f136","Type":"ContainerStarted","Data":"9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f"} Feb 16 14:48:11 crc kubenswrapper[4816]: I0216 14:48:11.059013 4816 generic.go:334] "Generic (PLEG): container finished" podID="12371735-159b-47c0-be90-b4455c62f136" containerID="9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f" exitCode=0 Feb 16 14:48:11 crc kubenswrapper[4816]: I0216 14:48:11.059108 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jqcf6" event={"ID":"12371735-159b-47c0-be90-b4455c62f136","Type":"ContainerDied","Data":"9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f"} Feb 16 14:48:12 crc kubenswrapper[4816]: I0216 14:48:12.071721 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jqcf6" event={"ID":"12371735-159b-47c0-be90-b4455c62f136","Type":"ContainerStarted","Data":"b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a"} Feb 16 14:48:12 crc kubenswrapper[4816]: I0216 14:48:12.111830 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jqcf6" podStartSLOduration=2.66365157 podStartE2EDuration="6.111808399s" podCreationTimestamp="2026-02-16 14:48:06 +0000 UTC" firstStartedPulling="2026-02-16 14:48:08.025193062 +0000 UTC m=+6287.351906790" lastFinishedPulling="2026-02-16 14:48:11.473349891 +0000 UTC m=+6290.800063619" observedRunningTime="2026-02-16 14:48:12.096009298 +0000 UTC m=+6291.422723076" watchObservedRunningTime="2026-02-16 14:48:12.111808399 +0000 UTC m=+6291.438522127" Feb 16 14:48:16 crc kubenswrapper[4816]: I0216 14:48:16.628045 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:16 crc kubenswrapper[4816]: I0216 14:48:16.628586 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:16 crc kubenswrapper[4816]: I0216 14:48:16.683237 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:17 crc kubenswrapper[4816]: I0216 14:48:17.184689 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:17 crc kubenswrapper[4816]: I0216 14:48:17.251702 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jqcf6"] Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.143517 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jqcf6" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="registry-server" containerID="cri-o://b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a" gracePeriod=2 Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.416793 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:48:19 crc kubenswrapper[4816]: E0216 14:48:19.417556 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.654673 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.793246 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-657mt\" (UniqueName: \"kubernetes.io/projected/12371735-159b-47c0-be90-b4455c62f136-kube-api-access-657mt\") pod \"12371735-159b-47c0-be90-b4455c62f136\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.793421 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-catalog-content\") pod \"12371735-159b-47c0-be90-b4455c62f136\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.793472 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-utilities\") pod \"12371735-159b-47c0-be90-b4455c62f136\" (UID: \"12371735-159b-47c0-be90-b4455c62f136\") " Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.794476 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-utilities" (OuterVolumeSpecName: "utilities") pod "12371735-159b-47c0-be90-b4455c62f136" (UID: "12371735-159b-47c0-be90-b4455c62f136"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.795532 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.800387 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12371735-159b-47c0-be90-b4455c62f136-kube-api-access-657mt" (OuterVolumeSpecName: "kube-api-access-657mt") pod "12371735-159b-47c0-be90-b4455c62f136" (UID: "12371735-159b-47c0-be90-b4455c62f136"). InnerVolumeSpecName "kube-api-access-657mt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.846927 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12371735-159b-47c0-be90-b4455c62f136" (UID: "12371735-159b-47c0-be90-b4455c62f136"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.898340 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-657mt\" (UniqueName: \"kubernetes.io/projected/12371735-159b-47c0-be90-b4455c62f136-kube-api-access-657mt\") on node \"crc\" DevicePath \"\"" Feb 16 14:48:19 crc kubenswrapper[4816]: I0216 14:48:19.898394 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12371735-159b-47c0-be90-b4455c62f136-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.157269 4816 generic.go:334] "Generic (PLEG): container finished" podID="12371735-159b-47c0-be90-b4455c62f136" containerID="b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a" exitCode=0 Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.157315 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jqcf6" event={"ID":"12371735-159b-47c0-be90-b4455c62f136","Type":"ContainerDied","Data":"b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a"} Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.157351 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jqcf6" event={"ID":"12371735-159b-47c0-be90-b4455c62f136","Type":"ContainerDied","Data":"4db7466902a0c80b3d48f3755332f45bae4756659dff03498631ac5a0a5bb66c"} Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.157370 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jqcf6" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.157373 4816 scope.go:117] "RemoveContainer" containerID="b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.184529 4816 scope.go:117] "RemoveContainer" containerID="9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.192535 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jqcf6"] Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.204337 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jqcf6"] Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.232928 4816 scope.go:117] "RemoveContainer" containerID="5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.288333 4816 scope.go:117] "RemoveContainer" containerID="b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a" Feb 16 14:48:20 crc kubenswrapper[4816]: E0216 14:48:20.290173 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a\": container with ID starting with b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a not found: ID does not exist" containerID="b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.290208 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a"} err="failed to get container status \"b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a\": rpc error: code = NotFound desc = could not find container \"b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a\": container with ID starting with b1dfcfef34ca51cdaa68ed12314535ab3b952421118fa59c9a42981583f99e7a not found: ID does not exist" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.290229 4816 scope.go:117] "RemoveContainer" containerID="9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f" Feb 16 14:48:20 crc kubenswrapper[4816]: E0216 14:48:20.290442 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f\": container with ID starting with 9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f not found: ID does not exist" containerID="9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.290455 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f"} err="failed to get container status \"9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f\": rpc error: code = NotFound desc = could not find container \"9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f\": container with ID starting with 9c1866457034d97e404d8bcd144940a76dd392289c50181bb8e9638d40b13a3f not found: ID does not exist" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.290468 4816 scope.go:117] "RemoveContainer" 
containerID="5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a" Feb 16 14:48:20 crc kubenswrapper[4816]: E0216 14:48:20.290628 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a\": container with ID starting with 5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a not found: ID does not exist" containerID="5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a" Feb 16 14:48:20 crc kubenswrapper[4816]: I0216 14:48:20.290641 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a"} err="failed to get container status \"5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a\": rpc error: code = NotFound desc = could not find container \"5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a\": container with ID starting with 5681a0bd0ff5008a97cbc59a2d22f922271303cb8d93bc6703fdc7ba7fb0246a not found: ID does not exist" Feb 16 14:48:21 crc kubenswrapper[4816]: I0216 14:48:21.088387 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-72x8m"] Feb 16 14:48:21 crc kubenswrapper[4816]: I0216 14:48:21.101354 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-72x8m"] Feb 16 14:48:21 crc kubenswrapper[4816]: I0216 14:48:21.413933 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12371735-159b-47c0-be90-b4455c62f136" path="/var/lib/kubelet/pods/12371735-159b-47c0-be90-b4455c62f136/volumes" Feb 16 14:48:21 crc kubenswrapper[4816]: I0216 14:48:21.415052 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6179ceaa-76f9-4804-80b5-27ffb9ee36c7" path="/var/lib/kubelet/pods/6179ceaa-76f9-4804-80b5-27ffb9ee36c7/volumes" Feb 16 14:48:22 crc kubenswrapper[4816]: I0216 14:48:22.038416 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-af00-account-create-update-gwzwc"] Feb 16 14:48:22 crc kubenswrapper[4816]: I0216 14:48:22.051765 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-af00-account-create-update-gwzwc"] Feb 16 14:48:23 crc kubenswrapper[4816]: I0216 14:48:23.412155 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61b25217-e3df-4ded-b750-c096a13f5577" path="/var/lib/kubelet/pods/61b25217-e3df-4ded-b750-c096a13f5577/volumes" Feb 16 14:48:27 crc kubenswrapper[4816]: I0216 14:48:27.046060 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-d954w"] Feb 16 14:48:27 crc kubenswrapper[4816]: I0216 14:48:27.058244 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-d954w"] Feb 16 14:48:27 crc kubenswrapper[4816]: I0216 14:48:27.410996 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58a861c3-95d2-47a1-a7b6-900d06b15b69" path="/var/lib/kubelet/pods/58a861c3-95d2-47a1-a7b6-900d06b15b69/volumes" Feb 16 14:48:28 crc kubenswrapper[4816]: I0216 14:48:28.062143 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-8072-account-create-update-24blg"] Feb 16 14:48:28 crc kubenswrapper[4816]: I0216 14:48:28.071379 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-8072-account-create-update-24blg"] Feb 16 14:48:29 crc kubenswrapper[4816]: I0216 
14:48:29.410010 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9115f53-1e62-4807-b164-bd11a880da8f" path="/var/lib/kubelet/pods/f9115f53-1e62-4807-b164-bd11a880da8f/volumes" Feb 16 14:48:31 crc kubenswrapper[4816]: I0216 14:48:31.254070 4816 scope.go:117] "RemoveContainer" containerID="f279ca8b948a2178045ef5a1f80201d51396c52779181569d16bbe4ae5ad2540" Feb 16 14:48:31 crc kubenswrapper[4816]: I0216 14:48:31.292283 4816 scope.go:117] "RemoveContainer" containerID="a71981ce4c0a582e96123fa0180da803976bef32297bd0ac457e5582fb143729" Feb 16 14:48:31 crc kubenswrapper[4816]: I0216 14:48:31.360845 4816 scope.go:117] "RemoveContainer" containerID="8c4f450e9fc4402d80e6d99c33dc723507a9a4ba7ce445dc2ca6d3e71c41e13a" Feb 16 14:48:31 crc kubenswrapper[4816]: I0216 14:48:31.420076 4816 scope.go:117] "RemoveContainer" containerID="6bbc01c1daa9958fffaf66c03827757762ff2aea99dd476edc8410a8e20802b3" Feb 16 14:48:33 crc kubenswrapper[4816]: I0216 14:48:33.399019 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:48:33 crc kubenswrapper[4816]: E0216 14:48:33.400426 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:48:45 crc kubenswrapper[4816]: I0216 14:48:45.398339 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:48:46 crc kubenswrapper[4816]: I0216 14:48:46.503393 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"d322158d1b3315ea12a7fd73bdbe58cbc413262b210e55b274068761e16e9c83"} Feb 16 14:49:05 crc kubenswrapper[4816]: I0216 14:49:05.228408 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-clbqk"] Feb 16 14:49:05 crc kubenswrapper[4816]: I0216 14:49:05.239163 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-clbqk"] Feb 16 14:49:05 crc kubenswrapper[4816]: I0216 14:49:05.415190 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74071f7f-b4f9-4695-9ab0-26a6b9a5027a" path="/var/lib/kubelet/pods/74071f7f-b4f9-4695-9ab0-26a6b9a5027a/volumes" Feb 16 14:49:31 crc kubenswrapper[4816]: I0216 14:49:31.577829 4816 scope.go:117] "RemoveContainer" containerID="e6a31bdda7c1bebd32c3366596f020ee18fdd7694acfdcd04a4546ffae00cbf5" Feb 16 14:49:31 crc kubenswrapper[4816]: I0216 14:49:31.628057 4816 scope.go:117] "RemoveContainer" containerID="48097c4a62170ad5dd59aebf8f967d1801dc907d4fbac89f146c3a51cf08a921" Feb 16 14:50:31 crc kubenswrapper[4816]: I0216 14:50:31.735739 4816 scope.go:117] "RemoveContainer" containerID="db30d225aa918e044d0c722cd502ed3b2d477460d1191aa714090ff8478ee46c" Feb 16 14:50:31 crc kubenswrapper[4816]: I0216 14:50:31.780016 4816 scope.go:117] "RemoveContainer" containerID="2227a8de0db522f0a4da1180476f5f06a8f18153a3549ada2683ec9e13331abe" Feb 16 14:50:31 crc kubenswrapper[4816]: I0216 14:50:31.812179 4816 scope.go:117] "RemoveContainer" 
containerID="c100893e3aae34edc214ccf8a9793fd5bd2c803ee0e457296b9f1a4032af0539" Feb 16 14:50:31 crc kubenswrapper[4816]: I0216 14:50:31.841435 4816 scope.go:117] "RemoveContainer" containerID="1386843639fd2b9e8402e9702323d6d683191602f988941e531ee2cf29fb55d6" Feb 16 14:51:06 crc kubenswrapper[4816]: I0216 14:51:06.941053 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:51:06 crc kubenswrapper[4816]: I0216 14:51:06.941715 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:51:36 crc kubenswrapper[4816]: I0216 14:51:36.941142 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:51:36 crc kubenswrapper[4816]: I0216 14:51:36.941609 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:51:43 crc kubenswrapper[4816]: I0216 14:51:43.049333 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-e296-account-create-update-k4wgc"] Feb 16 14:51:43 crc kubenswrapper[4816]: I0216 14:51:43.059949 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-e296-account-create-update-k4wgc"] Feb 16 14:51:43 crc kubenswrapper[4816]: I0216 14:51:43.092814 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-ctdck"] Feb 16 14:51:43 crc kubenswrapper[4816]: I0216 14:51:43.101482 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-ctdck"] Feb 16 14:51:43 crc kubenswrapper[4816]: I0216 14:51:43.410223 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43c1f2ea-7117-4626-be6c-739c2a374880" path="/var/lib/kubelet/pods/43c1f2ea-7117-4626-be6c-739c2a374880/volumes" Feb 16 14:51:43 crc kubenswrapper[4816]: I0216 14:51:43.410955 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a14c9cb-3e89-467d-aacf-4d3941cc0a5c" path="/var/lib/kubelet/pods/5a14c9cb-3e89-467d-aacf-4d3941cc0a5c/volumes" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.495135 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n9557"] Feb 16 14:51:51 crc kubenswrapper[4816]: E0216 14:51:51.499378 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="registry-server" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.499437 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="registry-server" Feb 16 14:51:51 crc kubenswrapper[4816]: E0216 14:51:51.499489 4816 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="extract-utilities" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.499498 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="extract-utilities" Feb 16 14:51:51 crc kubenswrapper[4816]: E0216 14:51:51.499513 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="extract-content" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.499521 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="extract-content" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.500018 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="12371735-159b-47c0-be90-b4455c62f136" containerName="registry-server" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.503159 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.510740 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n9557"] Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.671938 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6qx8\" (UniqueName: \"kubernetes.io/projected/31e6de4e-0a47-4e54-beae-6c971f6997d0-kube-api-access-s6qx8\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.672454 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-utilities\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.672642 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-catalog-content\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.777866 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6qx8\" (UniqueName: \"kubernetes.io/projected/31e6de4e-0a47-4e54-beae-6c971f6997d0-kube-api-access-s6qx8\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.778249 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-utilities\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.778351 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-catalog-content\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.779394 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-catalog-content\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.780334 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-utilities\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.813264 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6qx8\" (UniqueName: \"kubernetes.io/projected/31e6de4e-0a47-4e54-beae-6c971f6997d0-kube-api-access-s6qx8\") pod \"redhat-operators-n9557\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:51 crc kubenswrapper[4816]: I0216 14:51:51.824037 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:51:52 crc kubenswrapper[4816]: W0216 14:51:52.174646 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31e6de4e_0a47_4e54_beae_6c971f6997d0.slice/crio-60a2b23aa68e1e533a3ff0c19bcfe22222c032a9bb90aa187ef0f468038ea3ed WatchSource:0}: Error finding container 60a2b23aa68e1e533a3ff0c19bcfe22222c032a9bb90aa187ef0f468038ea3ed: Status 404 returned error can't find the container with id 60a2b23aa68e1e533a3ff0c19bcfe22222c032a9bb90aa187ef0f468038ea3ed Feb 16 14:51:52 crc kubenswrapper[4816]: I0216 14:51:52.191376 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n9557"] Feb 16 14:51:52 crc kubenswrapper[4816]: I0216 14:51:52.884361 4816 generic.go:334] "Generic (PLEG): container finished" podID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerID="839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9" exitCode=0 Feb 16 14:51:52 crc kubenswrapper[4816]: I0216 14:51:52.884448 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9557" event={"ID":"31e6de4e-0a47-4e54-beae-6c971f6997d0","Type":"ContainerDied","Data":"839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9"} Feb 16 14:51:52 crc kubenswrapper[4816]: I0216 14:51:52.884757 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9557" event={"ID":"31e6de4e-0a47-4e54-beae-6c971f6997d0","Type":"ContainerStarted","Data":"60a2b23aa68e1e533a3ff0c19bcfe22222c032a9bb90aa187ef0f468038ea3ed"} Feb 16 14:51:54 crc kubenswrapper[4816]: I0216 14:51:54.902285 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9557" event={"ID":"31e6de4e-0a47-4e54-beae-6c971f6997d0","Type":"ContainerStarted","Data":"e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386"} Feb 16 14:51:57 crc kubenswrapper[4816]: I0216 14:51:57.057682 4816 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-t2889"] Feb 16 14:51:57 crc kubenswrapper[4816]: I0216 14:51:57.070032 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-t2889"] Feb 16 14:51:57 crc kubenswrapper[4816]: I0216 14:51:57.411095 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d3c380d-f02f-42ef-95e0-0188dabec0f1" path="/var/lib/kubelet/pods/8d3c380d-f02f-42ef-95e0-0188dabec0f1/volumes" Feb 16 14:51:59 crc kubenswrapper[4816]: I0216 14:51:59.963163 4816 generic.go:334] "Generic (PLEG): container finished" podID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerID="e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386" exitCode=0 Feb 16 14:51:59 crc kubenswrapper[4816]: I0216 14:51:59.963270 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9557" event={"ID":"31e6de4e-0a47-4e54-beae-6c971f6997d0","Type":"ContainerDied","Data":"e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386"} Feb 16 14:52:01 crc kubenswrapper[4816]: I0216 14:52:01.006647 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9557" event={"ID":"31e6de4e-0a47-4e54-beae-6c971f6997d0","Type":"ContainerStarted","Data":"25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9"} Feb 16 14:52:01 crc kubenswrapper[4816]: I0216 14:52:01.045926 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n9557" podStartSLOduration=2.528102224 podStartE2EDuration="10.045878506s" podCreationTimestamp="2026-02-16 14:51:51 +0000 UTC" firstStartedPulling="2026-02-16 14:51:52.886239733 +0000 UTC m=+6512.212953471" lastFinishedPulling="2026-02-16 14:52:00.404016025 +0000 UTC m=+6519.730729753" observedRunningTime="2026-02-16 14:52:01.040264033 +0000 UTC m=+6520.366977771" watchObservedRunningTime="2026-02-16 14:52:01.045878506 +0000 UTC m=+6520.372592234" Feb 16 14:52:01 crc kubenswrapper[4816]: I0216 14:52:01.824178 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:52:01 crc kubenswrapper[4816]: I0216 14:52:01.825489 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:52:02 crc kubenswrapper[4816]: I0216 14:52:02.873152 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n9557" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="registry-server" probeResult="failure" output=< Feb 16 14:52:02 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 14:52:02 crc kubenswrapper[4816]: > Feb 16 14:52:06 crc kubenswrapper[4816]: I0216 14:52:06.941089 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:52:06 crc kubenswrapper[4816]: I0216 14:52:06.941611 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
Feb 16 14:52:06 crc kubenswrapper[4816]: I0216 14:52:06.941673 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:52:06 crc kubenswrapper[4816]: I0216 14:52:06.942486 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d322158d1b3315ea12a7fd73bdbe58cbc413262b210e55b274068761e16e9c83"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:52:06 crc kubenswrapper[4816]: I0216 14:52:06.942548 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://d322158d1b3315ea12a7fd73bdbe58cbc413262b210e55b274068761e16e9c83" gracePeriod=600 Feb 16 14:52:07 crc kubenswrapper[4816]: I0216 14:52:07.073822 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="d322158d1b3315ea12a7fd73bdbe58cbc413262b210e55b274068761e16e9c83" exitCode=0 Feb 16 14:52:07 crc kubenswrapper[4816]: I0216 14:52:07.073881 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"d322158d1b3315ea12a7fd73bdbe58cbc413262b210e55b274068761e16e9c83"} Feb 16 14:52:07 crc kubenswrapper[4816]: I0216 14:52:07.074192 4816 scope.go:117] "RemoveContainer" containerID="0de6f8a714f6be1da856dee4eb2fba5d344119abaa25e20800b6bf9ecbe12dd2" Feb 16 14:52:08 crc kubenswrapper[4816]: I0216 14:52:08.085623 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"} Feb 16 14:52:12 crc kubenswrapper[4816]: I0216 14:52:12.257384 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:52:12 crc kubenswrapper[4816]: I0216 14:52:12.332427 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:52:13 crc kubenswrapper[4816]: I0216 14:52:13.266022 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n9557"] Feb 16 14:52:14 crc kubenswrapper[4816]: I0216 14:52:14.240844 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n9557" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="registry-server" containerID="cri-o://25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9" gracePeriod=2 Feb 16 14:52:14 crc kubenswrapper[4816]: I0216 14:52:14.764238 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:52:14 crc kubenswrapper[4816]: I0216 14:52:14.955825 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-catalog-content\") pod \"31e6de4e-0a47-4e54-beae-6c971f6997d0\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " Feb 16 14:52:14 crc kubenswrapper[4816]: I0216 14:52:14.955976 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-utilities\") pod \"31e6de4e-0a47-4e54-beae-6c971f6997d0\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " Feb 16 14:52:14 crc kubenswrapper[4816]: I0216 14:52:14.956043 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6qx8\" (UniqueName: \"kubernetes.io/projected/31e6de4e-0a47-4e54-beae-6c971f6997d0-kube-api-access-s6qx8\") pod \"31e6de4e-0a47-4e54-beae-6c971f6997d0\" (UID: \"31e6de4e-0a47-4e54-beae-6c971f6997d0\") " Feb 16 14:52:14 crc kubenswrapper[4816]: I0216 14:52:14.956858 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-utilities" (OuterVolumeSpecName: "utilities") pod "31e6de4e-0a47-4e54-beae-6c971f6997d0" (UID: "31e6de4e-0a47-4e54-beae-6c971f6997d0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:52:14 crc kubenswrapper[4816]: I0216 14:52:14.972333 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31e6de4e-0a47-4e54-beae-6c971f6997d0-kube-api-access-s6qx8" (OuterVolumeSpecName: "kube-api-access-s6qx8") pod "31e6de4e-0a47-4e54-beae-6c971f6997d0" (UID: "31e6de4e-0a47-4e54-beae-6c971f6997d0"). InnerVolumeSpecName "kube-api-access-s6qx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.058800 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.058837 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6qx8\" (UniqueName: \"kubernetes.io/projected/31e6de4e-0a47-4e54-beae-6c971f6997d0-kube-api-access-s6qx8\") on node \"crc\" DevicePath \"\"" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.102399 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31e6de4e-0a47-4e54-beae-6c971f6997d0" (UID: "31e6de4e-0a47-4e54-beae-6c971f6997d0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.160919 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e6de4e-0a47-4e54-beae-6c971f6997d0-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.251440 4816 generic.go:334] "Generic (PLEG): container finished" podID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerID="25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9" exitCode=0 Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.251489 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n9557" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.251491 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9557" event={"ID":"31e6de4e-0a47-4e54-beae-6c971f6997d0","Type":"ContainerDied","Data":"25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9"} Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.251597 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n9557" event={"ID":"31e6de4e-0a47-4e54-beae-6c971f6997d0","Type":"ContainerDied","Data":"60a2b23aa68e1e533a3ff0c19bcfe22222c032a9bb90aa187ef0f468038ea3ed"} Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.251622 4816 scope.go:117] "RemoveContainer" containerID="25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.290083 4816 scope.go:117] "RemoveContainer" containerID="e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.295627 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n9557"] Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.306434 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n9557"] Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.316363 4816 scope.go:117] "RemoveContainer" containerID="839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.367236 4816 scope.go:117] "RemoveContainer" containerID="25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9" Feb 16 14:52:15 crc kubenswrapper[4816]: E0216 14:52:15.367992 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9\": container with ID starting with 25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9 not found: ID does not exist" containerID="25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.368125 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9"} err="failed to get container status \"25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9\": rpc error: code = NotFound desc = could not find container \"25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9\": container with ID starting with 25113f99b0a46ae4c5a86772f533aa94dee8eec1ce0b4b8979f4c1a29b679dd9 not found: ID does not exist" Feb 16 14:52:15 crc 
kubenswrapper[4816]: I0216 14:52:15.368198 4816 scope.go:117] "RemoveContainer" containerID="e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386" Feb 16 14:52:15 crc kubenswrapper[4816]: E0216 14:52:15.368527 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386\": container with ID starting with e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386 not found: ID does not exist" containerID="e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.368571 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386"} err="failed to get container status \"e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386\": rpc error: code = NotFound desc = could not find container \"e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386\": container with ID starting with e6c821b913ede7377122b1c17cfed0ccaebddcceaf9b1f6752c0c8bb7c2a1386 not found: ID does not exist" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.368596 4816 scope.go:117] "RemoveContainer" containerID="839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9" Feb 16 14:52:15 crc kubenswrapper[4816]: E0216 14:52:15.368967 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9\": container with ID starting with 839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9 not found: ID does not exist" containerID="839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.368991 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9"} err="failed to get container status \"839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9\": rpc error: code = NotFound desc = could not find container \"839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9\": container with ID starting with 839e4a7145dedeaf24a2a3c2c98b02d469fd310e9b9df0a0a30b0f2368a99cc9 not found: ID does not exist" Feb 16 14:52:15 crc kubenswrapper[4816]: I0216 14:52:15.418721 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" path="/var/lib/kubelet/pods/31e6de4e-0a47-4e54-beae-6c971f6997d0/volumes" Feb 16 14:52:31 crc kubenswrapper[4816]: I0216 14:52:31.920057 4816 scope.go:117] "RemoveContainer" containerID="a93518da57cd504991169f0a1404404d12599b7618d21d0712da1a4a89d3efb6" Feb 16 14:52:31 crc kubenswrapper[4816]: I0216 14:52:31.951892 4816 scope.go:117] "RemoveContainer" containerID="ec06ede898c773315bf6c3ae55a789b3b4b67906001ceed3571080eb9427f0a8" Feb 16 14:52:32 crc kubenswrapper[4816]: I0216 14:52:32.028403 4816 scope.go:117] "RemoveContainer" containerID="3ea9e664aff27f834c8adc935c0ed68cbb3762177c75292fd2a58335230380ca" Feb 16 14:54:02 crc kubenswrapper[4816]: I0216 14:54:02.041870 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-20a6-account-create-update-w6mcg"] Feb 16 14:54:02 crc kubenswrapper[4816]: I0216 14:54:02.052694 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/aodh-20a6-account-create-update-w6mcg"] Feb 16 14:54:03 crc kubenswrapper[4816]: I0216 14:54:03.025702 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-w6ndj"] Feb 16 14:54:03 crc kubenswrapper[4816]: I0216 14:54:03.036581 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-w6ndj"] Feb 16 14:54:03 crc kubenswrapper[4816]: I0216 14:54:03.416919 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="979a1247-b91e-4f6d-9046-c772f11fb1ec" path="/var/lib/kubelet/pods/979a1247-b91e-4f6d-9046-c772f11fb1ec/volumes" Feb 16 14:54:03 crc kubenswrapper[4816]: I0216 14:54:03.418704 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe90291c-c1f0-4dec-b1a9-4ab410230979" path="/var/lib/kubelet/pods/fe90291c-c1f0-4dec-b1a9-4ab410230979/volumes" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.791175 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8sdjk"] Feb 16 14:54:09 crc kubenswrapper[4816]: E0216 14:54:09.793925 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="registry-server" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.794085 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="registry-server" Feb 16 14:54:09 crc kubenswrapper[4816]: E0216 14:54:09.794198 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="extract-utilities" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.794281 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="extract-utilities" Feb 16 14:54:09 crc kubenswrapper[4816]: E0216 14:54:09.794382 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="extract-content" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.794468 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="extract-content" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.794905 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="31e6de4e-0a47-4e54-beae-6c971f6997d0" containerName="registry-server" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.797587 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.833708 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8sdjk"] Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.865764 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-utilities\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.865930 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f92ht\" (UniqueName: \"kubernetes.io/projected/07573092-421e-4f65-829a-75e5b21e55bc-kube-api-access-f92ht\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.866021 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-catalog-content\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.968350 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-catalog-content\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.968457 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-utilities\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.968544 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f92ht\" (UniqueName: \"kubernetes.io/projected/07573092-421e-4f65-829a-75e5b21e55bc-kube-api-access-f92ht\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.969084 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-catalog-content\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.969178 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-utilities\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:09 crc kubenswrapper[4816]: I0216 14:54:09.991276 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f92ht\" (UniqueName: \"kubernetes.io/projected/07573092-421e-4f65-829a-75e5b21e55bc-kube-api-access-f92ht\") pod \"community-operators-8sdjk\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:10 crc kubenswrapper[4816]: I0216 14:54:10.137715 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:10 crc kubenswrapper[4816]: I0216 14:54:10.771364 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8sdjk"] Feb 16 14:54:10 crc kubenswrapper[4816]: W0216 14:54:10.776825 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07573092_421e_4f65_829a_75e5b21e55bc.slice/crio-9a5b3356a57ecbc31a450d5e64ff5bf2e37764d774d4ea7d5ae3234cdf2b020b WatchSource:0}: Error finding container 9a5b3356a57ecbc31a450d5e64ff5bf2e37764d774d4ea7d5ae3234cdf2b020b: Status 404 returned error can't find the container with id 9a5b3356a57ecbc31a450d5e64ff5bf2e37764d774d4ea7d5ae3234cdf2b020b Feb 16 14:54:10 crc kubenswrapper[4816]: I0216 14:54:10.792199 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8sdjk" event={"ID":"07573092-421e-4f65-829a-75e5b21e55bc","Type":"ContainerStarted","Data":"9a5b3356a57ecbc31a450d5e64ff5bf2e37764d774d4ea7d5ae3234cdf2b020b"} Feb 16 14:54:11 crc kubenswrapper[4816]: I0216 14:54:11.809287 4816 generic.go:334] "Generic (PLEG): container finished" podID="07573092-421e-4f65-829a-75e5b21e55bc" containerID="857f23303f1625e612e0ed19029bb692c304f1c519a893362a964192174f4bf9" exitCode=0 Feb 16 14:54:11 crc kubenswrapper[4816]: I0216 14:54:11.809405 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8sdjk" event={"ID":"07573092-421e-4f65-829a-75e5b21e55bc","Type":"ContainerDied","Data":"857f23303f1625e612e0ed19029bb692c304f1c519a893362a964192174f4bf9"} Feb 16 14:54:11 crc kubenswrapper[4816]: I0216 14:54:11.813469 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 14:54:12 crc kubenswrapper[4816]: I0216 14:54:12.826393 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8sdjk" event={"ID":"07573092-421e-4f65-829a-75e5b21e55bc","Type":"ContainerStarted","Data":"0528d891756480a552172f9aa02a3ed557cf4d8ab4d69521c6a22b3f9a916fc7"} Feb 16 14:54:14 crc kubenswrapper[4816]: I0216 14:54:14.852458 4816 generic.go:334] "Generic (PLEG): container finished" podID="07573092-421e-4f65-829a-75e5b21e55bc" containerID="0528d891756480a552172f9aa02a3ed557cf4d8ab4d69521c6a22b3f9a916fc7" exitCode=0 Feb 16 14:54:14 crc kubenswrapper[4816]: I0216 14:54:14.852504 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8sdjk" event={"ID":"07573092-421e-4f65-829a-75e5b21e55bc","Type":"ContainerDied","Data":"0528d891756480a552172f9aa02a3ed557cf4d8ab4d69521c6a22b3f9a916fc7"} Feb 16 14:54:15 crc kubenswrapper[4816]: I0216 14:54:15.869985 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8sdjk" event={"ID":"07573092-421e-4f65-829a-75e5b21e55bc","Type":"ContainerStarted","Data":"fef7d4edaacfcad5d0a6cca8f937a920f732c3185d439a458fb2f7086c73ca16"} Feb 16 14:54:15 crc kubenswrapper[4816]: I0216 
14:54:15.902328 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8sdjk" podStartSLOduration=3.427698045 podStartE2EDuration="6.902269044s" podCreationTimestamp="2026-02-16 14:54:09 +0000 UTC" firstStartedPulling="2026-02-16 14:54:11.812972058 +0000 UTC m=+6651.139685826" lastFinishedPulling="2026-02-16 14:54:15.287543097 +0000 UTC m=+6654.614256825" observedRunningTime="2026-02-16 14:54:15.888298973 +0000 UTC m=+6655.215012721" watchObservedRunningTime="2026-02-16 14:54:15.902269044 +0000 UTC m=+6655.228982772" Feb 16 14:54:20 crc kubenswrapper[4816]: I0216 14:54:20.078825 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-8t7k2"] Feb 16 14:54:20 crc kubenswrapper[4816]: I0216 14:54:20.090131 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-8t7k2"] Feb 16 14:54:20 crc kubenswrapper[4816]: I0216 14:54:20.138895 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:20 crc kubenswrapper[4816]: I0216 14:54:20.139984 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:20 crc kubenswrapper[4816]: I0216 14:54:20.210821 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:20 crc kubenswrapper[4816]: I0216 14:54:20.998306 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:21 crc kubenswrapper[4816]: I0216 14:54:21.050809 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8sdjk"] Feb 16 14:54:21 crc kubenswrapper[4816]: I0216 14:54:21.424909 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c58e5974-3461-4e24-9576-60f97be5fcb3" path="/var/lib/kubelet/pods/c58e5974-3461-4e24-9576-60f97be5fcb3/volumes" Feb 16 14:54:22 crc kubenswrapper[4816]: I0216 14:54:22.953517 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8sdjk" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="registry-server" containerID="cri-o://fef7d4edaacfcad5d0a6cca8f937a920f732c3185d439a458fb2f7086c73ca16" gracePeriod=2 Feb 16 14:54:23 crc kubenswrapper[4816]: I0216 14:54:23.967105 4816 generic.go:334] "Generic (PLEG): container finished" podID="07573092-421e-4f65-829a-75e5b21e55bc" containerID="fef7d4edaacfcad5d0a6cca8f937a920f732c3185d439a458fb2f7086c73ca16" exitCode=0 Feb 16 14:54:23 crc kubenswrapper[4816]: I0216 14:54:23.967244 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8sdjk" event={"ID":"07573092-421e-4f65-829a-75e5b21e55bc","Type":"ContainerDied","Data":"fef7d4edaacfcad5d0a6cca8f937a920f732c3185d439a458fb2f7086c73ca16"} Feb 16 14:54:23 crc kubenswrapper[4816]: I0216 14:54:23.967455 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8sdjk" event={"ID":"07573092-421e-4f65-829a-75e5b21e55bc","Type":"ContainerDied","Data":"9a5b3356a57ecbc31a450d5e64ff5bf2e37764d774d4ea7d5ae3234cdf2b020b"} Feb 16 14:54:23 crc kubenswrapper[4816]: I0216 14:54:23.967480 4816 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="9a5b3356a57ecbc31a450d5e64ff5bf2e37764d774d4ea7d5ae3234cdf2b020b" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.002781 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.061564 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-utilities\") pod \"07573092-421e-4f65-829a-75e5b21e55bc\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.061704 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f92ht\" (UniqueName: \"kubernetes.io/projected/07573092-421e-4f65-829a-75e5b21e55bc-kube-api-access-f92ht\") pod \"07573092-421e-4f65-829a-75e5b21e55bc\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.061772 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-catalog-content\") pod \"07573092-421e-4f65-829a-75e5b21e55bc\" (UID: \"07573092-421e-4f65-829a-75e5b21e55bc\") " Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.063912 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-utilities" (OuterVolumeSpecName: "utilities") pod "07573092-421e-4f65-829a-75e5b21e55bc" (UID: "07573092-421e-4f65-829a-75e5b21e55bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.067935 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07573092-421e-4f65-829a-75e5b21e55bc-kube-api-access-f92ht" (OuterVolumeSpecName: "kube-api-access-f92ht") pod "07573092-421e-4f65-829a-75e5b21e55bc" (UID: "07573092-421e-4f65-829a-75e5b21e55bc"). InnerVolumeSpecName "kube-api-access-f92ht". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.129881 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07573092-421e-4f65-829a-75e5b21e55bc" (UID: "07573092-421e-4f65-829a-75e5b21e55bc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.163872 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.163920 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f92ht\" (UniqueName: \"kubernetes.io/projected/07573092-421e-4f65-829a-75e5b21e55bc-kube-api-access-f92ht\") on node \"crc\" DevicePath \"\"" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.163932 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07573092-421e-4f65-829a-75e5b21e55bc-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:54:24 crc kubenswrapper[4816]: I0216 14:54:24.982124 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8sdjk" Feb 16 14:54:25 crc kubenswrapper[4816]: I0216 14:54:25.045444 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8sdjk"] Feb 16 14:54:25 crc kubenswrapper[4816]: I0216 14:54:25.073602 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8sdjk"] Feb 16 14:54:25 crc kubenswrapper[4816]: I0216 14:54:25.413774 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07573092-421e-4f65-829a-75e5b21e55bc" path="/var/lib/kubelet/pods/07573092-421e-4f65-829a-75e5b21e55bc/volumes" Feb 16 14:54:32 crc kubenswrapper[4816]: I0216 14:54:32.168589 4816 scope.go:117] "RemoveContainer" containerID="8c92918eb2ed9c27fe25adf206d6b97fb8bb34de57375601747ea605b6c694a2" Feb 16 14:54:32 crc kubenswrapper[4816]: I0216 14:54:32.209074 4816 scope.go:117] "RemoveContainer" containerID="e2fb61a6c8f2ce7de57b093d558b02c6391fa10e125688bd79bed0f51155f8db" Feb 16 14:54:32 crc kubenswrapper[4816]: I0216 14:54:32.292531 4816 scope.go:117] "RemoveContainer" containerID="3cbf90d198e46988a3d799b6f31a0bdd69680c50178712411b495376ed179ff4" Feb 16 14:54:36 crc kubenswrapper[4816]: I0216 14:54:36.940763 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:54:36 crc kubenswrapper[4816]: I0216 14:54:36.943017 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:54:42 crc kubenswrapper[4816]: I0216 14:54:42.035501 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-2xbmj"] Feb 16 14:54:42 crc kubenswrapper[4816]: I0216 14:54:42.047752 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-a628-account-create-update-hkxdn"] Feb 16 14:54:42 crc kubenswrapper[4816]: I0216 14:54:42.058022 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-2xbmj"] Feb 16 14:54:42 crc kubenswrapper[4816]: I0216 14:54:42.068176 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/manila-a628-account-create-update-hkxdn"] Feb 16 14:54:43 crc kubenswrapper[4816]: I0216 14:54:43.410675 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b6b729a-9df6-47f0-847a-1c16cf3a3f25" path="/var/lib/kubelet/pods/4b6b729a-9df6-47f0-847a-1c16cf3a3f25/volumes" Feb 16 14:54:43 crc kubenswrapper[4816]: I0216 14:54:43.411797 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="747ea09f-2f99-4b20-ae42-49a8fbbd1c56" path="/var/lib/kubelet/pods/747ea09f-2f99-4b20-ae42-49a8fbbd1c56/volumes" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.420368 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9b42s"] Feb 16 14:54:50 crc kubenswrapper[4816]: E0216 14:54:50.421240 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="extract-content" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.421254 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="extract-content" Feb 16 14:54:50 crc kubenswrapper[4816]: E0216 14:54:50.421282 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="registry-server" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.421292 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="registry-server" Feb 16 14:54:50 crc kubenswrapper[4816]: E0216 14:54:50.421320 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="extract-utilities" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.421330 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="extract-utilities" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.421558 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="07573092-421e-4f65-829a-75e5b21e55bc" containerName="registry-server" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.423432 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.451395 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9b42s"] Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.613470 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9t2z\" (UniqueName: \"kubernetes.io/projected/e0d04829-acf8-4bea-84bf-b3407b600db5-kube-api-access-b9t2z\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.613818 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-utilities\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.613936 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-catalog-content\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.715908 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-utilities\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.715989 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-catalog-content\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.716098 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9t2z\" (UniqueName: \"kubernetes.io/projected/e0d04829-acf8-4bea-84bf-b3407b600db5-kube-api-access-b9t2z\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.716498 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-utilities\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.716571 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-catalog-content\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.734060 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-b9t2z\" (UniqueName: \"kubernetes.io/projected/e0d04829-acf8-4bea-84bf-b3407b600db5-kube-api-access-b9t2z\") pod \"certified-operators-9b42s\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:50 crc kubenswrapper[4816]: I0216 14:54:50.750374 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:54:51 crc kubenswrapper[4816]: I0216 14:54:51.252307 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9b42s"] Feb 16 14:54:51 crc kubenswrapper[4816]: I0216 14:54:51.278490 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9b42s" event={"ID":"e0d04829-acf8-4bea-84bf-b3407b600db5","Type":"ContainerStarted","Data":"516e3eda74b05ccf78d5d797c9b0843c38b6608ae07edffcb9f6d200aaa48465"} Feb 16 14:54:52 crc kubenswrapper[4816]: I0216 14:54:52.307052 4816 generic.go:334] "Generic (PLEG): container finished" podID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerID="c9783fec1bd4cc6085d5084ac5e1e5a62856c3613ee5cdff9cab5ca19505c435" exitCode=0 Feb 16 14:54:52 crc kubenswrapper[4816]: I0216 14:54:52.307193 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9b42s" event={"ID":"e0d04829-acf8-4bea-84bf-b3407b600db5","Type":"ContainerDied","Data":"c9783fec1bd4cc6085d5084ac5e1e5a62856c3613ee5cdff9cab5ca19505c435"} Feb 16 14:54:53 crc kubenswrapper[4816]: I0216 14:54:53.318945 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9b42s" event={"ID":"e0d04829-acf8-4bea-84bf-b3407b600db5","Type":"ContainerStarted","Data":"470ed93b9b1b115188b32bd07c805f4088be6fa732b8c30bae37359850aac113"} Feb 16 14:54:54 crc kubenswrapper[4816]: I0216 14:54:54.041972 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-pqn4j"] Feb 16 14:54:54 crc kubenswrapper[4816]: I0216 14:54:54.051647 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-pqn4j"] Feb 16 14:54:55 crc kubenswrapper[4816]: I0216 14:54:55.339279 4816 generic.go:334] "Generic (PLEG): container finished" podID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerID="470ed93b9b1b115188b32bd07c805f4088be6fa732b8c30bae37359850aac113" exitCode=0 Feb 16 14:54:55 crc kubenswrapper[4816]: I0216 14:54:55.339694 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9b42s" event={"ID":"e0d04829-acf8-4bea-84bf-b3407b600db5","Type":"ContainerDied","Data":"470ed93b9b1b115188b32bd07c805f4088be6fa732b8c30bae37359850aac113"} Feb 16 14:54:55 crc kubenswrapper[4816]: I0216 14:54:55.415733 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd3b7c80-9ea3-48c6-91fc-947d0315b206" path="/var/lib/kubelet/pods/cd3b7c80-9ea3-48c6-91fc-947d0315b206/volumes" Feb 16 14:54:56 crc kubenswrapper[4816]: I0216 14:54:56.356427 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9b42s" event={"ID":"e0d04829-acf8-4bea-84bf-b3407b600db5","Type":"ContainerStarted","Data":"e3389ef428d502ce305a96483ca73a5fef05e6c67e7e1c7369558d62ec65c6d7"} Feb 16 14:54:56 crc kubenswrapper[4816]: I0216 14:54:56.394365 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9b42s" 
podStartSLOduration=2.905666116 podStartE2EDuration="6.394334609s" podCreationTimestamp="2026-02-16 14:54:50 +0000 UTC" firstStartedPulling="2026-02-16 14:54:52.313807583 +0000 UTC m=+6691.640521351" lastFinishedPulling="2026-02-16 14:54:55.802476116 +0000 UTC m=+6695.129189844" observedRunningTime="2026-02-16 14:54:56.392696635 +0000 UTC m=+6695.719410443" watchObservedRunningTime="2026-02-16 14:54:56.394334609 +0000 UTC m=+6695.721048327" Feb 16 14:55:00 crc kubenswrapper[4816]: I0216 14:55:00.750624 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:55:00 crc kubenswrapper[4816]: I0216 14:55:00.751504 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:55:00 crc kubenswrapper[4816]: I0216 14:55:00.832999 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:55:01 crc kubenswrapper[4816]: I0216 14:55:01.497991 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:55:01 crc kubenswrapper[4816]: I0216 14:55:01.541793 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9b42s"] Feb 16 14:55:03 crc kubenswrapper[4816]: I0216 14:55:03.439066 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9b42s" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="registry-server" containerID="cri-o://e3389ef428d502ce305a96483ca73a5fef05e6c67e7e1c7369558d62ec65c6d7" gracePeriod=2 Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.449724 4816 generic.go:334] "Generic (PLEG): container finished" podID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerID="e3389ef428d502ce305a96483ca73a5fef05e6c67e7e1c7369558d62ec65c6d7" exitCode=0 Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.449824 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9b42s" event={"ID":"e0d04829-acf8-4bea-84bf-b3407b600db5","Type":"ContainerDied","Data":"e3389ef428d502ce305a96483ca73a5fef05e6c67e7e1c7369558d62ec65c6d7"} Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.562624 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.653569 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-utilities\") pod \"e0d04829-acf8-4bea-84bf-b3407b600db5\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.653774 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9t2z\" (UniqueName: \"kubernetes.io/projected/e0d04829-acf8-4bea-84bf-b3407b600db5-kube-api-access-b9t2z\") pod \"e0d04829-acf8-4bea-84bf-b3407b600db5\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.653860 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-catalog-content\") pod \"e0d04829-acf8-4bea-84bf-b3407b600db5\" (UID: \"e0d04829-acf8-4bea-84bf-b3407b600db5\") " Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.654749 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-utilities" (OuterVolumeSpecName: "utilities") pod "e0d04829-acf8-4bea-84bf-b3407b600db5" (UID: "e0d04829-acf8-4bea-84bf-b3407b600db5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.661476 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0d04829-acf8-4bea-84bf-b3407b600db5-kube-api-access-b9t2z" (OuterVolumeSpecName: "kube-api-access-b9t2z") pod "e0d04829-acf8-4bea-84bf-b3407b600db5" (UID: "e0d04829-acf8-4bea-84bf-b3407b600db5"). InnerVolumeSpecName "kube-api-access-b9t2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.713290 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0d04829-acf8-4bea-84bf-b3407b600db5" (UID: "e0d04829-acf8-4bea-84bf-b3407b600db5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.757026 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9t2z\" (UniqueName: \"kubernetes.io/projected/e0d04829-acf8-4bea-84bf-b3407b600db5-kube-api-access-b9t2z\") on node \"crc\" DevicePath \"\"" Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.757058 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:55:04 crc kubenswrapper[4816]: I0216 14:55:04.757068 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d04829-acf8-4bea-84bf-b3407b600db5-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:55:05 crc kubenswrapper[4816]: I0216 14:55:05.461504 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9b42s" event={"ID":"e0d04829-acf8-4bea-84bf-b3407b600db5","Type":"ContainerDied","Data":"516e3eda74b05ccf78d5d797c9b0843c38b6608ae07edffcb9f6d200aaa48465"} Feb 16 14:55:05 crc kubenswrapper[4816]: I0216 14:55:05.461549 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9b42s" Feb 16 14:55:05 crc kubenswrapper[4816]: I0216 14:55:05.461575 4816 scope.go:117] "RemoveContainer" containerID="e3389ef428d502ce305a96483ca73a5fef05e6c67e7e1c7369558d62ec65c6d7" Feb 16 14:55:05 crc kubenswrapper[4816]: I0216 14:55:05.490189 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9b42s"] Feb 16 14:55:05 crc kubenswrapper[4816]: I0216 14:55:05.494635 4816 scope.go:117] "RemoveContainer" containerID="470ed93b9b1b115188b32bd07c805f4088be6fa732b8c30bae37359850aac113" Feb 16 14:55:05 crc kubenswrapper[4816]: I0216 14:55:05.499194 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9b42s"] Feb 16 14:55:05 crc kubenswrapper[4816]: I0216 14:55:05.527656 4816 scope.go:117] "RemoveContainer" containerID="c9783fec1bd4cc6085d5084ac5e1e5a62856c3613ee5cdff9cab5ca19505c435" Feb 16 14:55:06 crc kubenswrapper[4816]: I0216 14:55:06.940835 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:55:06 crc kubenswrapper[4816]: I0216 14:55:06.941901 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:55:07 crc kubenswrapper[4816]: I0216 14:55:07.414640 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" path="/var/lib/kubelet/pods/e0d04829-acf8-4bea-84bf-b3407b600db5/volumes" Feb 16 14:55:32 crc kubenswrapper[4816]: I0216 14:55:32.413124 4816 scope.go:117] "RemoveContainer" containerID="31eb500f8c9309455b1f72031c2ce1da19ef6bec4306bacd0a29c504c13c229d" Feb 16 14:55:32 crc kubenswrapper[4816]: I0216 14:55:32.454625 4816 scope.go:117] "RemoveContainer" 
containerID="d670ac68c03d876eeb2cc36d9b12eefb455db6144ef754274c63e72bf27d6869" Feb 16 14:55:32 crc kubenswrapper[4816]: I0216 14:55:32.545254 4816 scope.go:117] "RemoveContainer" containerID="8d60c16420dc20619141aca49ac52c4581b2124329b25c3b05466253750af2d6" Feb 16 14:55:36 crc kubenswrapper[4816]: I0216 14:55:36.940941 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 14:55:36 crc kubenswrapper[4816]: I0216 14:55:36.941542 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 14:55:36 crc kubenswrapper[4816]: I0216 14:55:36.941595 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 14:55:36 crc kubenswrapper[4816]: I0216 14:55:36.942365 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 14:55:36 crc kubenswrapper[4816]: I0216 14:55:36.942429 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" gracePeriod=600 Feb 16 14:55:37 crc kubenswrapper[4816]: E0216 14:55:37.068121 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:55:37 crc kubenswrapper[4816]: I0216 14:55:37.827160 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" exitCode=0 Feb 16 14:55:37 crc kubenswrapper[4816]: I0216 14:55:37.827469 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"} Feb 16 14:55:37 crc kubenswrapper[4816]: I0216 14:55:37.827599 4816 scope.go:117] "RemoveContainer" containerID="d322158d1b3315ea12a7fd73bdbe58cbc413262b210e55b274068761e16e9c83" Feb 16 14:55:37 crc kubenswrapper[4816]: I0216 14:55:37.828598 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 14:55:37 crc kubenswrapper[4816]: E0216 14:55:37.829054 4816 
Feb 16 14:55:37 crc kubenswrapper[4816]: E0216 14:55:37.829054 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:55:52 crc kubenswrapper[4816]: I0216 14:55:52.398981 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:55:52 crc kubenswrapper[4816]: E0216 14:55:52.400112 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:56:03 crc kubenswrapper[4816]: I0216 14:56:03.400568 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:56:03 crc kubenswrapper[4816]: E0216 14:56:03.401859 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:56:16 crc kubenswrapper[4816]: I0216 14:56:16.398594 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:56:16 crc kubenswrapper[4816]: E0216 14:56:16.399714 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:56:28 crc kubenswrapper[4816]: I0216 14:56:28.399685 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:56:28 crc kubenswrapper[4816]: E0216 14:56:28.400292 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:56:42 crc kubenswrapper[4816]: I0216 14:56:42.399584 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:56:42 crc kubenswrapper[4816]: E0216 14:56:42.401112 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:56:53 crc kubenswrapper[4816]: I0216 14:56:53.404873 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:56:53 crc kubenswrapper[4816]: E0216 14:56:53.406035 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:57:08 crc kubenswrapper[4816]: I0216 14:57:08.399598 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:57:08 crc kubenswrapper[4816]: E0216 14:57:08.402322 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:57:22 crc kubenswrapper[4816]: I0216 14:57:22.398996 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:57:22 crc kubenswrapper[4816]: E0216 14:57:22.399637 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:57:36 crc kubenswrapper[4816]: I0216 14:57:36.398175 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:57:36 crc kubenswrapper[4816]: E0216 14:57:36.399096 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:57:49 crc kubenswrapper[4816]: I0216 14:57:49.398524 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:57:49 crc kubenswrapper[4816]: E0216 14:57:49.399354 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
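Every retry in the run above is refused with the same "back-off 5m0s" message because the crash-loop delay has already reached its cap: the kubelet doubles the restart delay after each failed restart up to five minutes. A sketch of that schedule (the 10s initial delay and reset-after-stable-run behavior are kubelet defaults stated here as assumptions, not read from this log):

    package main

    import (
    	"fmt"
    	"time"
    )

    // backoffDelays returns the crash-loop restart delays for the first n
    // failures: doubling from an initial delay, capped at 5 minutes, which
    // is the "back-off 5m0s" seen in the entries above.
    func backoffDelays(n int) []time.Duration {
    	const (
    		initial  = 10 * time.Second
    		maxDelay = 5 * time.Minute
    	)
    	delays := make([]time.Duration, 0, n)
    	d := time.Duration(initial)
    	for i := 0; i < n; i++ {
    		delays = append(delays, d)
    		d *= 2
    		if d > maxDelay {
    			d = maxDelay
    		}
    	}
    	return delays
    }

    func main() {
    	fmt.Println(backoffDelays(7)) // [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]
    }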
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:57:57 crc kubenswrapper[4816]: I0216 14:57:57.088885 4816 generic.go:334] "Generic (PLEG): container finished" podID="34f724c7-a493-4b35-8d7a-ae2ebb52353d" containerID="3ef92a8b08d8d34159ccde856c2780dbb281528258a69704c97cdca800f4dbdd" exitCode=0 Feb 16 14:57:57 crc kubenswrapper[4816]: I0216 14:57:57.088975 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" event={"ID":"34f724c7-a493-4b35-8d7a-ae2ebb52353d","Type":"ContainerDied","Data":"3ef92a8b08d8d34159ccde856c2780dbb281528258a69704c97cdca800f4dbdd"} Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.568444 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.592145 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsz4p\" (UniqueName: \"kubernetes.io/projected/34f724c7-a493-4b35-8d7a-ae2ebb52353d-kube-api-access-fsz4p\") pod \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.592275 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ceph\") pod \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.592315 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-tripleo-cleanup-combined-ca-bundle\") pod \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.592345 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ssh-key-openstack-cell1\") pod \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.592542 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-inventory\") pod \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\" (UID: \"34f724c7-a493-4b35-8d7a-ae2ebb52353d\") " Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.599815 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34f724c7-a493-4b35-8d7a-ae2ebb52353d-kube-api-access-fsz4p" (OuterVolumeSpecName: "kube-api-access-fsz4p") pod "34f724c7-a493-4b35-8d7a-ae2ebb52353d" (UID: "34f724c7-a493-4b35-8d7a-ae2ebb52353d"). InnerVolumeSpecName "kube-api-access-fsz4p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.600464 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "34f724c7-a493-4b35-8d7a-ae2ebb52353d" (UID: "34f724c7-a493-4b35-8d7a-ae2ebb52353d"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.600533 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ceph" (OuterVolumeSpecName: "ceph") pod "34f724c7-a493-4b35-8d7a-ae2ebb52353d" (UID: "34f724c7-a493-4b35-8d7a-ae2ebb52353d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.630745 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-inventory" (OuterVolumeSpecName: "inventory") pod "34f724c7-a493-4b35-8d7a-ae2ebb52353d" (UID: "34f724c7-a493-4b35-8d7a-ae2ebb52353d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.633715 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "34f724c7-a493-4b35-8d7a-ae2ebb52353d" (UID: "34f724c7-a493-4b35-8d7a-ae2ebb52353d"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.695896 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsz4p\" (UniqueName: \"kubernetes.io/projected/34f724c7-a493-4b35-8d7a-ae2ebb52353d-kube-api-access-fsz4p\") on node \"crc\" DevicePath \"\"" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.696253 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.696268 4816 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.696286 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 14:57:58 crc kubenswrapper[4816]: I0216 14:57:58.696299 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34f724c7-a493-4b35-8d7a-ae2ebb52353d-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 14:57:59 crc kubenswrapper[4816]: I0216 14:57:59.119712 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" event={"ID":"34f724c7-a493-4b35-8d7a-ae2ebb52353d","Type":"ContainerDied","Data":"e3c57ef1338abf8e5ec134da0729e5b7ce4059f020fa5ad2a438f4fffbccda3b"} Feb 16 14:57:59 crc kubenswrapper[4816]: I0216 14:57:59.120024 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3c57ef1338abf8e5ec134da0729e5b7ce4059f020fa5ad2a438f4fffbccda3b" Feb 16 14:57:59 crc kubenswrapper[4816]: I0216 14:57:59.119818 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.089845 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-kdxx4"] Feb 16 14:58:01 crc kubenswrapper[4816]: E0216 14:58:01.090627 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="extract-content" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.090650 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="extract-content" Feb 16 14:58:01 crc kubenswrapper[4816]: E0216 14:58:01.090710 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="extract-utilities" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.090717 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="extract-utilities" Feb 16 14:58:01 crc kubenswrapper[4816]: E0216 14:58:01.090727 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="registry-server" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.090732 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="registry-server" Feb 16 14:58:01 crc kubenswrapper[4816]: E0216 14:58:01.090749 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f724c7-a493-4b35-8d7a-ae2ebb52353d" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.090757 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f724c7-a493-4b35-8d7a-ae2ebb52353d" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.090964 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0d04829-acf8-4bea-84bf-b3407b600db5" containerName="registry-server" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.090982 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="34f724c7-a493-4b35-8d7a-ae2ebb52353d" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.091863 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.094920 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.095064 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.095417 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.096521 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.100285 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-kdxx4"] Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.259362 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.259512 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.259624 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-inventory\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.259734 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ceph\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.259821 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s55tt\" (UniqueName: \"kubernetes.io/projected/6618881a-03b6-4eec-b4e5-850bbce3a0ce-kube-api-access-s55tt\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.362377 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s55tt\" (UniqueName: \"kubernetes.io/projected/6618881a-03b6-4eec-b4e5-850bbce3a0ce-kube-api-access-s55tt\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " 
pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.362601 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.362939 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.363157 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-inventory\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.363309 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ceph\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.367950 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.370615 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ceph\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.370940 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-inventory\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.376171 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.384938 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s55tt\" (UniqueName: 
\"kubernetes.io/projected/6618881a-03b6-4eec-b4e5-850bbce3a0ce-kube-api-access-s55tt\") pod \"bootstrap-openstack-openstack-cell1-kdxx4\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.409236 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 14:58:01 crc kubenswrapper[4816]: I0216 14:58:01.957515 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-kdxx4"] Feb 16 14:58:01 crc kubenswrapper[4816]: W0216 14:58:01.958735 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6618881a_03b6_4eec_b4e5_850bbce3a0ce.slice/crio-8529a7d9ccb651962ba6539d27c620bc3fc3ac5592c8cba9f86abd96239cb83c WatchSource:0}: Error finding container 8529a7d9ccb651962ba6539d27c620bc3fc3ac5592c8cba9f86abd96239cb83c: Status 404 returned error can't find the container with id 8529a7d9ccb651962ba6539d27c620bc3fc3ac5592c8cba9f86abd96239cb83c Feb 16 14:58:02 crc kubenswrapper[4816]: I0216 14:58:02.158388 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" event={"ID":"6618881a-03b6-4eec-b4e5-850bbce3a0ce","Type":"ContainerStarted","Data":"8529a7d9ccb651962ba6539d27c620bc3fc3ac5592c8cba9f86abd96239cb83c"} Feb 16 14:58:02 crc kubenswrapper[4816]: I0216 14:58:02.399437 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 14:58:02 crc kubenswrapper[4816]: E0216 14:58:02.399713 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:58:03 crc kubenswrapper[4816]: I0216 14:58:03.168405 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" event={"ID":"6618881a-03b6-4eec-b4e5-850bbce3a0ce","Type":"ContainerStarted","Data":"a8d36f91e3cb786f094fd5fda9b273352090d650dccbd83993b61c3229c0ee47"} Feb 16 14:58:03 crc kubenswrapper[4816]: I0216 14:58:03.198700 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" podStartSLOduration=1.822785205 podStartE2EDuration="2.19864556s" podCreationTimestamp="2026-02-16 14:58:01 +0000 UTC" firstStartedPulling="2026-02-16 14:58:01.96120756 +0000 UTC m=+6881.287921298" lastFinishedPulling="2026-02-16 14:58:02.337067925 +0000 UTC m=+6881.663781653" observedRunningTime="2026-02-16 14:58:03.193688935 +0000 UTC m=+6882.520402713" watchObservedRunningTime="2026-02-16 14:58:03.19864556 +0000 UTC m=+6882.525359288" Feb 16 14:58:15 crc kubenswrapper[4816]: I0216 14:58:15.398774 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 14:58:15 crc kubenswrapper[4816]: E0216 14:58:15.399525 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Feb 16 14:58:29 crc kubenswrapper[4816]: I0216 14:58:29.399708 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:58:29 crc kubenswrapper[4816]: E0216 14:58:29.400984 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:58:43 crc kubenswrapper[4816]: I0216 14:58:43.399747 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:58:43 crc kubenswrapper[4816]: E0216 14:58:43.402229 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.658763 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6flgb"]
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.661636 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.689607 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-catalog-content\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.689767 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njd9r\" (UniqueName: \"kubernetes.io/projected/e2016268-8c82-4436-86a5-2c245260e96b-kube-api-access-njd9r\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.689856 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-utilities\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.690090 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6flgb"]
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.790853 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-catalog-content\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.790923 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njd9r\" (UniqueName: \"kubernetes.io/projected/e2016268-8c82-4436-86a5-2c245260e96b-kube-api-access-njd9r\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.791010 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-utilities\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.791427 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-catalog-content\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.791499 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-utilities\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.826553 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njd9r\" (UniqueName: \"kubernetes.io/projected/e2016268-8c82-4436-86a5-2c245260e96b-kube-api-access-njd9r\") pod \"redhat-marketplace-6flgb\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") " pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:53 crc kubenswrapper[4816]: I0216 14:58:53.988832 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:58:54 crc kubenswrapper[4816]: I0216 14:58:54.476037 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6flgb"]
Feb 16 14:58:54 crc kubenswrapper[4816]: I0216 14:58:54.774763 4816 generic.go:334] "Generic (PLEG): container finished" podID="e2016268-8c82-4436-86a5-2c245260e96b" containerID="062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d" exitCode=0
Feb 16 14:58:54 crc kubenswrapper[4816]: I0216 14:58:54.774896 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6flgb" event={"ID":"e2016268-8c82-4436-86a5-2c245260e96b","Type":"ContainerDied","Data":"062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d"}
Feb 16 14:58:54 crc kubenswrapper[4816]: I0216 14:58:54.775095 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6flgb" event={"ID":"e2016268-8c82-4436-86a5-2c245260e96b","Type":"ContainerStarted","Data":"bedd6e529bb895cb0306d14bb8979b33e68cc18ba409016bcc932d86a6d13299"}
Feb 16 14:58:55 crc kubenswrapper[4816]: I0216 14:58:55.449547 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 14:58:55 crc kubenswrapper[4816]: E0216 14:58:55.453983 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 14:58:56 crc kubenswrapper[4816]: I0216 14:58:56.801450 4816 generic.go:334] "Generic (PLEG): container finished" podID="e2016268-8c82-4436-86a5-2c245260e96b" containerID="1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f" exitCode=0
Feb 16 14:58:56 crc kubenswrapper[4816]: I0216 14:58:56.801905 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6flgb" event={"ID":"e2016268-8c82-4436-86a5-2c245260e96b","Type":"ContainerDied","Data":"1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f"}
Feb 16 14:58:57 crc kubenswrapper[4816]: I0216 14:58:57.817637 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6flgb" event={"ID":"e2016268-8c82-4436-86a5-2c245260e96b","Type":"ContainerStarted","Data":"36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac"}
Feb 16 14:58:57 crc kubenswrapper[4816]: I0216 14:58:57.844293 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6flgb" podStartSLOduration=2.405488606 podStartE2EDuration="4.844275771s" podCreationTimestamp="2026-02-16 14:58:53 +0000 UTC" firstStartedPulling="2026-02-16 14:58:54.776941723 +0000 UTC m=+6934.103655451" lastFinishedPulling="2026-02-16 14:58:57.215728878 +0000 UTC m=+6936.542442616" observedRunningTime="2026-02-16 14:58:57.839769258 +0000 UTC m=+6937.166482996" watchObservedRunningTime="2026-02-16 14:58:57.844275771 +0000 UTC m=+6937.170989499"
Feb 16 14:59:03 crc kubenswrapper[4816]: I0216 14:59:03.989388 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:59:03 crc kubenswrapper[4816]: I0216 14:59:03.990213 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:59:04 crc kubenswrapper[4816]: I0216 14:59:04.084197 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:59:04 crc kubenswrapper[4816]: I0216 14:59:04.160545 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:59:04 crc kubenswrapper[4816]: I0216 14:59:04.322793 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6flgb"]
Feb 16 14:59:06 crc kubenswrapper[4816]: I0216 14:59:06.444218 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6flgb" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="registry-server" containerID="cri-o://36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac" gracePeriod=2
Feb 16 14:59:06 crc kubenswrapper[4816]: I0216 14:59:06.965044 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6flgb"
Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.147646 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-catalog-content\") pod \"e2016268-8c82-4436-86a5-2c245260e96b\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") "
Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.148051 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njd9r\" (UniqueName: \"kubernetes.io/projected/e2016268-8c82-4436-86a5-2c245260e96b-kube-api-access-njd9r\") pod \"e2016268-8c82-4436-86a5-2c245260e96b\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") "
Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.148252 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-utilities\") pod \"e2016268-8c82-4436-86a5-2c245260e96b\" (UID: \"e2016268-8c82-4436-86a5-2c245260e96b\") "
Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.149852 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-utilities" (OuterVolumeSpecName: "utilities") pod "e2016268-8c82-4436-86a5-2c245260e96b" (UID: "e2016268-8c82-4436-86a5-2c245260e96b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
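The marketplace pod is stopped with gracePeriod=2 while the machine-config-daemon earlier received gracePeriod=600: in both cases the runtime sends SIGTERM and escalates to SIGKILL when the grace period lapses. A sketch of that escalation pattern for a local process (os/exec stands in for the CRI call; the stopWithGrace helper is hypothetical):

    package main

    import (
    	"os/exec"
    	"syscall"
    	"time"
    )

    // stopWithGrace sends SIGTERM, waits up to grace, then SIGKILLs, the
    // same escalation the "Killing container with a grace period" entries
    // imply. This drives a local process, not a container runtime.
    func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
    	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
    		return err
    	}
    	done := make(chan error, 1)
    	go func() { done <- cmd.Wait() }()
    	select {
    	case err := <-done:
    		return err // exited within the grace period
    	case <-time.After(grace):
    		_ = cmd.Process.Kill() // grace expired: SIGKILL
    		return <-done
    	}
    }

    func main() {
    	cmd := exec.Command("sleep", "60")
    	_ = cmd.Start()
    	_ = stopWithGrace(cmd, 2*time.Second) // gracePeriod=2, as for the registry-server above
    }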
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.154990 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2016268-8c82-4436-86a5-2c245260e96b-kube-api-access-njd9r" (OuterVolumeSpecName: "kube-api-access-njd9r") pod "e2016268-8c82-4436-86a5-2c245260e96b" (UID: "e2016268-8c82-4436-86a5-2c245260e96b"). InnerVolumeSpecName "kube-api-access-njd9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.174276 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2016268-8c82-4436-86a5-2c245260e96b" (UID: "e2016268-8c82-4436-86a5-2c245260e96b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.252065 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.252114 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njd9r\" (UniqueName: \"kubernetes.io/projected/e2016268-8c82-4436-86a5-2c245260e96b-kube-api-access-njd9r\") on node \"crc\" DevicePath \"\"" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.252134 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2016268-8c82-4436-86a5-2c245260e96b-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.399328 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 14:59:07 crc kubenswrapper[4816]: E0216 14:59:07.399852 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.467990 4816 generic.go:334] "Generic (PLEG): container finished" podID="e2016268-8c82-4436-86a5-2c245260e96b" containerID="36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac" exitCode=0 Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.468024 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6flgb" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.468074 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6flgb" event={"ID":"e2016268-8c82-4436-86a5-2c245260e96b","Type":"ContainerDied","Data":"36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac"} Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.468125 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6flgb" event={"ID":"e2016268-8c82-4436-86a5-2c245260e96b","Type":"ContainerDied","Data":"bedd6e529bb895cb0306d14bb8979b33e68cc18ba409016bcc932d86a6d13299"} Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.468150 4816 scope.go:117] "RemoveContainer" containerID="36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.581765 4816 scope.go:117] "RemoveContainer" containerID="1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.587879 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6flgb"] Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.600741 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6flgb"] Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.609550 4816 scope.go:117] "RemoveContainer" containerID="062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.651386 4816 scope.go:117] "RemoveContainer" containerID="36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac" Feb 16 14:59:07 crc kubenswrapper[4816]: E0216 14:59:07.651881 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac\": container with ID starting with 36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac not found: ID does not exist" containerID="36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.651923 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac"} err="failed to get container status \"36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac\": rpc error: code = NotFound desc = could not find container \"36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac\": container with ID starting with 36968c867a81d138689f39afcbea13c9c0fd8b3199d5dfc1ccf127a87de37aac not found: ID does not exist" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.651953 4816 scope.go:117] "RemoveContainer" containerID="1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f" Feb 16 14:59:07 crc kubenswrapper[4816]: E0216 14:59:07.652287 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f\": container with ID starting with 1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f not found: ID does not exist" containerID="1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.652336 4816 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f"} err="failed to get container status \"1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f\": rpc error: code = NotFound desc = could not find container \"1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f\": container with ID starting with 1ec2f1290d98a1805f0711cc0c3f63a8f75240ceb6c40d457ec5fea4ca30a23f not found: ID does not exist" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.652363 4816 scope.go:117] "RemoveContainer" containerID="062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d" Feb 16 14:59:07 crc kubenswrapper[4816]: E0216 14:59:07.652717 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d\": container with ID starting with 062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d not found: ID does not exist" containerID="062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d" Feb 16 14:59:07 crc kubenswrapper[4816]: I0216 14:59:07.652744 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d"} err="failed to get container status \"062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d\": rpc error: code = NotFound desc = could not find container \"062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d\": container with ID starting with 062d285ea429925295550d048594e2513d67692141f47a391acb4b8dc8e81e2d not found: ID does not exist" Feb 16 14:59:09 crc kubenswrapper[4816]: I0216 14:59:09.434090 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2016268-8c82-4436-86a5-2c245260e96b" path="/var/lib/kubelet/pods/e2016268-8c82-4436-86a5-2c245260e96b/volumes" Feb 16 14:59:22 crc kubenswrapper[4816]: I0216 14:59:22.399771 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 14:59:22 crc kubenswrapper[4816]: E0216 14:59:22.400648 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:59:35 crc kubenswrapper[4816]: I0216 14:59:35.399006 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 14:59:35 crc kubenswrapper[4816]: E0216 14:59:35.400039 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 14:59:48 crc kubenswrapper[4816]: I0216 14:59:48.399578 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 14:59:48 crc 
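The NotFound errors at 14:59:07 are benign: the containers were already removed by the runtime, so the delete retries find nothing and the kubelet simply logs the miss and moves on. The general pattern, treating gRPC NotFound as an already-satisfied delete, sketched against the gRPC status API (the deleteContainer stub is hypothetical, not a real CRI client):

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // deleteContainer is a stand-in for a runtime RemoveContainer call;
    // here it always reports the container missing, like the entries above.
    func deleteContainer(id string) error {
    	return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    // removeIfPresent treats NotFound as success: the desired end state
    // ("container gone") already holds, so cleanup stays idempotent under
    // retries and races with the runtime's own garbage collection.
    func removeIfPresent(id string) error {
    	err := deleteContainer(id)
    	if status.Code(err) == codes.NotFound {
    		fmt.Printf("container %s already removed\n", id)
    		return nil
    	}
    	return err
    }

    func main() {
    	if err := removeIfPresent("36968c867a81d1"); err != nil {
    		fmt.Println("cleanup failed:", err)
    	}
    }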
Feb 16 14:59:48 crc kubenswrapper[4816]: E0216 14:59:48.400934 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.171749 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"]
Feb 16 15:00:00 crc kubenswrapper[4816]: E0216 15:00:00.173252 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="extract-content"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.173287 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="extract-content"
Feb 16 15:00:00 crc kubenswrapper[4816]: E0216 15:00:00.173318 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="extract-utilities"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.173330 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="extract-utilities"
Feb 16 15:00:00 crc kubenswrapper[4816]: E0216 15:00:00.173362 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="registry-server"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.173373 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="registry-server"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.173759 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2016268-8c82-4436-86a5-2c245260e96b" containerName="registry-server"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.175024 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.182505 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.184436 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.196386 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"]
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.282311 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv6md\" (UniqueName: \"kubernetes.io/projected/ddb6aa19-9826-4b45-96af-ee1f81060e16-kube-api-access-zv6md\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.282810 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ddb6aa19-9826-4b45-96af-ee1f81060e16-secret-volume\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.282981 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ddb6aa19-9826-4b45-96af-ee1f81060e16-config-volume\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.384505 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv6md\" (UniqueName: \"kubernetes.io/projected/ddb6aa19-9826-4b45-96af-ee1f81060e16-kube-api-access-zv6md\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.384614 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ddb6aa19-9826-4b45-96af-ee1f81060e16-secret-volume\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.384663 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ddb6aa19-9826-4b45-96af-ee1f81060e16-config-volume\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.386088 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ddb6aa19-9826-4b45-96af-ee1f81060e16-config-volume\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.394106 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ddb6aa19-9826-4b45-96af-ee1f81060e16-secret-volume\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.406688 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv6md\" (UniqueName: \"kubernetes.io/projected/ddb6aa19-9826-4b45-96af-ee1f81060e16-kube-api-access-zv6md\") pod \"collect-profiles-29520900-s4q7t\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.506148 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:00 crc kubenswrapper[4816]: I0216 15:00:00.979187 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"]
Feb 16 15:00:01 crc kubenswrapper[4816]: I0216 15:00:01.217208 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t" event={"ID":"ddb6aa19-9826-4b45-96af-ee1f81060e16","Type":"ContainerStarted","Data":"da488c5e1ef030897b774525f88d954a2ec2ae073e905c7d63855ba72afd63a1"}
Feb 16 15:00:01 crc kubenswrapper[4816]: I0216 15:00:01.217270 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t" event={"ID":"ddb6aa19-9826-4b45-96af-ee1f81060e16","Type":"ContainerStarted","Data":"edb4a309f901dce49a9c2ba08df9cedf5ddf42c55cc45d3ee8b9c15d493e22fa"}
Feb 16 15:00:01 crc kubenswrapper[4816]: I0216 15:00:01.237614 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t" podStartSLOduration=1.2375636349999999 podStartE2EDuration="1.237563635s" podCreationTimestamp="2026-02-16 15:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:00:01.233014311 +0000 UTC m=+7000.559728049" watchObservedRunningTime="2026-02-16 15:00:01.237563635 +0000 UTC m=+7000.564277363"
Feb 16 15:00:02 crc kubenswrapper[4816]: I0216 15:00:02.227602 4816 generic.go:334] "Generic (PLEG): container finished" podID="ddb6aa19-9826-4b45-96af-ee1f81060e16" containerID="da488c5e1ef030897b774525f88d954a2ec2ae073e905c7d63855ba72afd63a1" exitCode=0
Feb 16 15:00:02 crc kubenswrapper[4816]: I0216 15:00:02.227700 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t" event={"ID":"ddb6aa19-9826-4b45-96af-ee1f81060e16","Type":"ContainerDied","Data":"da488c5e1ef030897b774525f88d954a2ec2ae073e905c7d63855ba72afd63a1"}
Feb 16 15:00:02 crc kubenswrapper[4816]: I0216 15:00:02.399550 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef"
Feb 16 15:00:02 crc kubenswrapper[4816]: E0216 15:00:02.400138 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.738743 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"
Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.764471 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv6md\" (UniqueName: \"kubernetes.io/projected/ddb6aa19-9826-4b45-96af-ee1f81060e16-kube-api-access-zv6md\") pod \"ddb6aa19-9826-4b45-96af-ee1f81060e16\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") "
Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.764603 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ddb6aa19-9826-4b45-96af-ee1f81060e16-config-volume\") pod \"ddb6aa19-9826-4b45-96af-ee1f81060e16\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") "
Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.764735 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ddb6aa19-9826-4b45-96af-ee1f81060e16-secret-volume\") pod \"ddb6aa19-9826-4b45-96af-ee1f81060e16\" (UID: \"ddb6aa19-9826-4b45-96af-ee1f81060e16\") "
Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.766325 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddb6aa19-9826-4b45-96af-ee1f81060e16-config-volume" (OuterVolumeSpecName: "config-volume") pod "ddb6aa19-9826-4b45-96af-ee1f81060e16" (UID: "ddb6aa19-9826-4b45-96af-ee1f81060e16"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.771890 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddb6aa19-9826-4b45-96af-ee1f81060e16-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ddb6aa19-9826-4b45-96af-ee1f81060e16" (UID: "ddb6aa19-9826-4b45-96af-ee1f81060e16"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.775260 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddb6aa19-9826-4b45-96af-ee1f81060e16-kube-api-access-zv6md" (OuterVolumeSpecName: "kube-api-access-zv6md") pod "ddb6aa19-9826-4b45-96af-ee1f81060e16" (UID: "ddb6aa19-9826-4b45-96af-ee1f81060e16"). InnerVolumeSpecName "kube-api-access-zv6md". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.866925 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ddb6aa19-9826-4b45-96af-ee1f81060e16-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.866972 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ddb6aa19-9826-4b45-96af-ee1f81060e16-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:00:03 crc kubenswrapper[4816]: I0216 15:00:03.866986 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv6md\" (UniqueName: \"kubernetes.io/projected/ddb6aa19-9826-4b45-96af-ee1f81060e16-kube-api-access-zv6md\") on node \"crc\" DevicePath \"\"" Feb 16 15:00:04 crc kubenswrapper[4816]: I0216 15:00:04.248603 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t" event={"ID":"ddb6aa19-9826-4b45-96af-ee1f81060e16","Type":"ContainerDied","Data":"edb4a309f901dce49a9c2ba08df9cedf5ddf42c55cc45d3ee8b9c15d493e22fa"} Feb 16 15:00:04 crc kubenswrapper[4816]: I0216 15:00:04.248965 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edb4a309f901dce49a9c2ba08df9cedf5ddf42c55cc45d3ee8b9c15d493e22fa" Feb 16 15:00:04 crc kubenswrapper[4816]: I0216 15:00:04.248638 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t" Feb 16 15:00:04 crc kubenswrapper[4816]: I0216 15:00:04.325460 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st"] Feb 16 15:00:04 crc kubenswrapper[4816]: I0216 15:00:04.338268 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520855-d64st"] Feb 16 15:00:05 crc kubenswrapper[4816]: I0216 15:00:05.412723 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f65357b-a782-4d03-bb11-e7fba09ca5f8" path="/var/lib/kubelet/pods/2f65357b-a782-4d03-bb11-e7fba09ca5f8/volumes" Feb 16 15:00:13 crc kubenswrapper[4816]: I0216 15:00:13.399765 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 15:00:13 crc kubenswrapper[4816]: E0216 15:00:13.400712 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:00:27 crc kubenswrapper[4816]: I0216 15:00:27.399725 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 15:00:27 crc kubenswrapper[4816]: E0216 15:00:27.401012 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:00:32 crc kubenswrapper[4816]: I0216 15:00:32.767356 4816 scope.go:117] "RemoveContainer" containerID="0528d891756480a552172f9aa02a3ed557cf4d8ab4d69521c6a22b3f9a916fc7" Feb 16 15:00:32 crc kubenswrapper[4816]: I0216 15:00:32.814816 4816 scope.go:117] "RemoveContainer" containerID="c7aaebcda910099191102012e7f4056f1f678285653262e0b510637a4ce24e86" Feb 16 15:00:32 crc kubenswrapper[4816]: I0216 15:00:32.876803 4816 scope.go:117] "RemoveContainer" containerID="857f23303f1625e612e0ed19029bb692c304f1c519a893362a964192174f4bf9" Feb 16 15:00:32 crc kubenswrapper[4816]: I0216 15:00:32.930187 4816 scope.go:117] "RemoveContainer" containerID="fef7d4edaacfcad5d0a6cca8f937a920f732c3185d439a458fb2f7086c73ca16" Feb 16 15:00:42 crc kubenswrapper[4816]: I0216 15:00:42.398488 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 15:00:43 crc kubenswrapper[4816]: I0216 15:00:43.699627 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"7b7768d35603acc2ec6f8a3a3978f27ffd1b0d30c9fd3424095a1924ca5f68fd"} Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.169865 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29520901-wgdcv"] Feb 16 15:01:00 crc kubenswrapper[4816]: E0216 15:01:00.171087 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddb6aa19-9826-4b45-96af-ee1f81060e16" containerName="collect-profiles" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.171109 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddb6aa19-9826-4b45-96af-ee1f81060e16" containerName="collect-profiles" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.171357 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddb6aa19-9826-4b45-96af-ee1f81060e16" containerName="collect-profiles" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.172209 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.197909 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29520901-wgdcv"] Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.212896 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-combined-ca-bundle\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.213030 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-config-data\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.213067 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc2rm\" (UniqueName: \"kubernetes.io/projected/4345a6a3-62a1-40c1-a611-37289fe170fd-kube-api-access-fc2rm\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.213141 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-fernet-keys\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.315122 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-fernet-keys\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.315240 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-combined-ca-bundle\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.315326 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-config-data\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.315349 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc2rm\" (UniqueName: \"kubernetes.io/projected/4345a6a3-62a1-40c1-a611-37289fe170fd-kube-api-access-fc2rm\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.321675 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-fernet-keys\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.321972 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-combined-ca-bundle\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.331671 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-config-data\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.336934 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc2rm\" (UniqueName: \"kubernetes.io/projected/4345a6a3-62a1-40c1-a611-37289fe170fd-kube-api-access-fc2rm\") pod \"keystone-cron-29520901-wgdcv\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.523698 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:00 crc kubenswrapper[4816]: I0216 15:01:00.978010 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29520901-wgdcv"] Feb 16 15:01:00 crc kubenswrapper[4816]: W0216 15:01:00.985612 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4345a6a3_62a1_40c1_a611_37289fe170fd.slice/crio-83734c90fbef9ec7ef4abdc0c6fc5dcce114f3273f7a3865d209dcb2e7441cc5 WatchSource:0}: Error finding container 83734c90fbef9ec7ef4abdc0c6fc5dcce114f3273f7a3865d209dcb2e7441cc5: Status 404 returned error can't find the container with id 83734c90fbef9ec7ef4abdc0c6fc5dcce114f3273f7a3865d209dcb2e7441cc5 Feb 16 15:01:01 crc kubenswrapper[4816]: I0216 15:01:01.909077 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29520901-wgdcv" event={"ID":"4345a6a3-62a1-40c1-a611-37289fe170fd","Type":"ContainerStarted","Data":"1c4414a5c8562a589d6ec425fd0bb3549a07e1121fa2fc768ff9c00358b23391"} Feb 16 15:01:01 crc kubenswrapper[4816]: I0216 15:01:01.910105 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29520901-wgdcv" event={"ID":"4345a6a3-62a1-40c1-a611-37289fe170fd","Type":"ContainerStarted","Data":"83734c90fbef9ec7ef4abdc0c6fc5dcce114f3273f7a3865d209dcb2e7441cc5"} Feb 16 15:01:01 crc kubenswrapper[4816]: I0216 15:01:01.932320 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29520901-wgdcv" podStartSLOduration=1.932281562 podStartE2EDuration="1.932281562s" podCreationTimestamp="2026-02-16 15:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:01:01.929806025 +0000 UTC m=+7061.256519763" watchObservedRunningTime="2026-02-16 15:01:01.932281562 +0000 UTC m=+7061.258995330" Feb 16 15:01:04 crc kubenswrapper[4816]: I0216 15:01:04.946792 4816 
generic.go:334] "Generic (PLEG): container finished" podID="4345a6a3-62a1-40c1-a611-37289fe170fd" containerID="1c4414a5c8562a589d6ec425fd0bb3549a07e1121fa2fc768ff9c00358b23391" exitCode=0 Feb 16 15:01:04 crc kubenswrapper[4816]: I0216 15:01:04.946906 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29520901-wgdcv" event={"ID":"4345a6a3-62a1-40c1-a611-37289fe170fd","Type":"ContainerDied","Data":"1c4414a5c8562a589d6ec425fd0bb3549a07e1121fa2fc768ff9c00358b23391"} Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.366836 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.372086 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-fernet-keys\") pod \"4345a6a3-62a1-40c1-a611-37289fe170fd\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.372176 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-combined-ca-bundle\") pod \"4345a6a3-62a1-40c1-a611-37289fe170fd\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.372210 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-config-data\") pod \"4345a6a3-62a1-40c1-a611-37289fe170fd\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.372414 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc2rm\" (UniqueName: \"kubernetes.io/projected/4345a6a3-62a1-40c1-a611-37289fe170fd-kube-api-access-fc2rm\") pod \"4345a6a3-62a1-40c1-a611-37289fe170fd\" (UID: \"4345a6a3-62a1-40c1-a611-37289fe170fd\") " Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.378917 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4345a6a3-62a1-40c1-a611-37289fe170fd-kube-api-access-fc2rm" (OuterVolumeSpecName: "kube-api-access-fc2rm") pod "4345a6a3-62a1-40c1-a611-37289fe170fd" (UID: "4345a6a3-62a1-40c1-a611-37289fe170fd"). InnerVolumeSpecName "kube-api-access-fc2rm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.382910 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4345a6a3-62a1-40c1-a611-37289fe170fd" (UID: "4345a6a3-62a1-40c1-a611-37289fe170fd"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.419838 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4345a6a3-62a1-40c1-a611-37289fe170fd" (UID: "4345a6a3-62a1-40c1-a611-37289fe170fd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.451942 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-config-data" (OuterVolumeSpecName: "config-data") pod "4345a6a3-62a1-40c1-a611-37289fe170fd" (UID: "4345a6a3-62a1-40c1-a611-37289fe170fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.475459 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc2rm\" (UniqueName: \"kubernetes.io/projected/4345a6a3-62a1-40c1-a611-37289fe170fd-kube-api-access-fc2rm\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.475489 4816 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.475499 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.475508 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4345a6a3-62a1-40c1-a611-37289fe170fd-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.982120 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29520901-wgdcv" Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.982069 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29520901-wgdcv" event={"ID":"4345a6a3-62a1-40c1-a611-37289fe170fd","Type":"ContainerDied","Data":"83734c90fbef9ec7ef4abdc0c6fc5dcce114f3273f7a3865d209dcb2e7441cc5"} Feb 16 15:01:06 crc kubenswrapper[4816]: I0216 15:01:06.984833 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83734c90fbef9ec7ef4abdc0c6fc5dcce114f3273f7a3865d209dcb2e7441cc5" Feb 16 15:01:10 crc kubenswrapper[4816]: I0216 15:01:10.019487 4816 generic.go:334] "Generic (PLEG): container finished" podID="6618881a-03b6-4eec-b4e5-850bbce3a0ce" containerID="a8d36f91e3cb786f094fd5fda9b273352090d650dccbd83993b61c3229c0ee47" exitCode=0 Feb 16 15:01:10 crc kubenswrapper[4816]: I0216 15:01:10.019576 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" event={"ID":"6618881a-03b6-4eec-b4e5-850bbce3a0ce","Type":"ContainerDied","Data":"a8d36f91e3cb786f094fd5fda9b273352090d650dccbd83993b61c3229c0ee47"} Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.611780 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.628101 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ceph\") pod \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.628165 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-inventory\") pod \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.628211 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-bootstrap-combined-ca-bundle\") pod \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.628261 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ssh-key-openstack-cell1\") pod \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.638435 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "6618881a-03b6-4eec-b4e5-850bbce3a0ce" (UID: "6618881a-03b6-4eec-b4e5-850bbce3a0ce"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.638882 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ceph" (OuterVolumeSpecName: "ceph") pod "6618881a-03b6-4eec-b4e5-850bbce3a0ce" (UID: "6618881a-03b6-4eec-b4e5-850bbce3a0ce"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.675004 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "6618881a-03b6-4eec-b4e5-850bbce3a0ce" (UID: "6618881a-03b6-4eec-b4e5-850bbce3a0ce"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.683043 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-inventory" (OuterVolumeSpecName: "inventory") pod "6618881a-03b6-4eec-b4e5-850bbce3a0ce" (UID: "6618881a-03b6-4eec-b4e5-850bbce3a0ce"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.733092 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s55tt\" (UniqueName: \"kubernetes.io/projected/6618881a-03b6-4eec-b4e5-850bbce3a0ce-kube-api-access-s55tt\") pod \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\" (UID: \"6618881a-03b6-4eec-b4e5-850bbce3a0ce\") " Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.733612 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.733630 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.733640 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.733665 4816 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6618881a-03b6-4eec-b4e5-850bbce3a0ce-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.735730 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6618881a-03b6-4eec-b4e5-850bbce3a0ce-kube-api-access-s55tt" (OuterVolumeSpecName: "kube-api-access-s55tt") pod "6618881a-03b6-4eec-b4e5-850bbce3a0ce" (UID: "6618881a-03b6-4eec-b4e5-850bbce3a0ce"). InnerVolumeSpecName "kube-api-access-s55tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:01:11 crc kubenswrapper[4816]: I0216 15:01:11.835228 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s55tt\" (UniqueName: \"kubernetes.io/projected/6618881a-03b6-4eec-b4e5-850bbce3a0ce-kube-api-access-s55tt\") on node \"crc\" DevicePath \"\"" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.040957 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" event={"ID":"6618881a-03b6-4eec-b4e5-850bbce3a0ce","Type":"ContainerDied","Data":"8529a7d9ccb651962ba6539d27c620bc3fc3ac5592c8cba9f86abd96239cb83c"} Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.041001 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8529a7d9ccb651962ba6539d27c620bc3fc3ac5592c8cba9f86abd96239cb83c" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.041079 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-kdxx4" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.213992 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-gbsjp"] Feb 16 15:01:12 crc kubenswrapper[4816]: E0216 15:01:12.214485 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4345a6a3-62a1-40c1-a611-37289fe170fd" containerName="keystone-cron" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.214502 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4345a6a3-62a1-40c1-a611-37289fe170fd" containerName="keystone-cron" Feb 16 15:01:12 crc kubenswrapper[4816]: E0216 15:01:12.214542 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6618881a-03b6-4eec-b4e5-850bbce3a0ce" containerName="bootstrap-openstack-openstack-cell1" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.214550 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6618881a-03b6-4eec-b4e5-850bbce3a0ce" containerName="bootstrap-openstack-openstack-cell1" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.220851 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4345a6a3-62a1-40c1-a611-37289fe170fd" containerName="keystone-cron" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.220915 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6618881a-03b6-4eec-b4e5-850bbce3a0ce" containerName="bootstrap-openstack-openstack-cell1" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.222035 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.234332 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.234961 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.235150 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.235450 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.280731 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-gbsjp"] Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.365786 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.365854 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ceph\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.365953 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfbsq\" (UniqueName: \"kubernetes.io/projected/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-kube-api-access-xfbsq\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.365991 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-inventory\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.467978 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfbsq\" (UniqueName: \"kubernetes.io/projected/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-kube-api-access-xfbsq\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.468058 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-inventory\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.468132 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.468192 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ceph\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.473319 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.473367 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-inventory\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.473762 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ceph\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.484891 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfbsq\" (UniqueName: \"kubernetes.io/projected/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-kube-api-access-xfbsq\") pod \"download-cache-openstack-openstack-cell1-gbsjp\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:12 crc kubenswrapper[4816]: I0216 15:01:12.570500 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:01:13 crc kubenswrapper[4816]: I0216 15:01:13.180149 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-gbsjp"] Feb 16 15:01:13 crc kubenswrapper[4816]: I0216 15:01:13.192966 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 15:01:14 crc kubenswrapper[4816]: I0216 15:01:14.062428 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" event={"ID":"ce44ed35-23a8-40ad-ac4a-4925f19d14ec","Type":"ContainerStarted","Data":"9c08e61739f970699781f16f49acbd4c6a5b665a978b55fadc55ca09cce61227"} Feb 16 15:01:14 crc kubenswrapper[4816]: I0216 15:01:14.082957 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" podStartSLOduration=1.502541624 podStartE2EDuration="2.082927744s" podCreationTimestamp="2026-02-16 15:01:12 +0000 UTC" firstStartedPulling="2026-02-16 15:01:13.192708749 +0000 UTC m=+7072.519422477" lastFinishedPulling="2026-02-16 15:01:13.773094859 +0000 UTC m=+7073.099808597" observedRunningTime="2026-02-16 15:01:14.082916903 +0000 UTC m=+7073.409630631" watchObservedRunningTime="2026-02-16 15:01:14.082927744 +0000 UTC m=+7073.409641472" Feb 16 15:01:15 crc kubenswrapper[4816]: I0216 15:01:15.087335 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" event={"ID":"ce44ed35-23a8-40ad-ac4a-4925f19d14ec","Type":"ContainerStarted","Data":"129390fd077fba9cd9ae583a9541e982ebc47f1fe9ef4af332fe105f582ca945"} Feb 16 15:02:45 crc kubenswrapper[4816]: I0216 15:02:45.227843 4816 generic.go:334] "Generic (PLEG): container finished" podID="ce44ed35-23a8-40ad-ac4a-4925f19d14ec" containerID="129390fd077fba9cd9ae583a9541e982ebc47f1fe9ef4af332fe105f582ca945" exitCode=0 Feb 16 15:02:45 crc kubenswrapper[4816]: I0216 15:02:45.227901 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" event={"ID":"ce44ed35-23a8-40ad-ac4a-4925f19d14ec","Type":"ContainerDied","Data":"129390fd077fba9cd9ae583a9541e982ebc47f1fe9ef4af332fe105f582ca945"} Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.769246 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.892616 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfbsq\" (UniqueName: \"kubernetes.io/projected/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-kube-api-access-xfbsq\") pod \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.892795 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ssh-key-openstack-cell1\") pod \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.892900 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ceph\") pod \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.893122 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-inventory\") pod \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\" (UID: \"ce44ed35-23a8-40ad-ac4a-4925f19d14ec\") " Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.899184 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-kube-api-access-xfbsq" (OuterVolumeSpecName: "kube-api-access-xfbsq") pod "ce44ed35-23a8-40ad-ac4a-4925f19d14ec" (UID: "ce44ed35-23a8-40ad-ac4a-4925f19d14ec"). InnerVolumeSpecName "kube-api-access-xfbsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.899860 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ceph" (OuterVolumeSpecName: "ceph") pod "ce44ed35-23a8-40ad-ac4a-4925f19d14ec" (UID: "ce44ed35-23a8-40ad-ac4a-4925f19d14ec"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.923861 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "ce44ed35-23a8-40ad-ac4a-4925f19d14ec" (UID: "ce44ed35-23a8-40ad-ac4a-4925f19d14ec"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.932489 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-inventory" (OuterVolumeSpecName: "inventory") pod "ce44ed35-23a8-40ad-ac4a-4925f19d14ec" (UID: "ce44ed35-23a8-40ad-ac4a-4925f19d14ec"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.995265 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.995298 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfbsq\" (UniqueName: \"kubernetes.io/projected/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-kube-api-access-xfbsq\") on node \"crc\" DevicePath \"\"" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.995309 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:02:46 crc kubenswrapper[4816]: I0216 15:02:46.995317 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce44ed35-23a8-40ad-ac4a-4925f19d14ec-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.255065 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" event={"ID":"ce44ed35-23a8-40ad-ac4a-4925f19d14ec","Type":"ContainerDied","Data":"9c08e61739f970699781f16f49acbd4c6a5b665a978b55fadc55ca09cce61227"} Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.255104 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-gbsjp" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.255129 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c08e61739f970699781f16f49acbd4c6a5b665a978b55fadc55ca09cce61227" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.363270 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-xk7gv"] Feb 16 15:02:47 crc kubenswrapper[4816]: E0216 15:02:47.363978 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce44ed35-23a8-40ad-ac4a-4925f19d14ec" containerName="download-cache-openstack-openstack-cell1" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.364021 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce44ed35-23a8-40ad-ac4a-4925f19d14ec" containerName="download-cache-openstack-openstack-cell1" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.364450 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce44ed35-23a8-40ad-ac4a-4925f19d14ec" containerName="download-cache-openstack-openstack-cell1" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.365772 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.368603 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.368612 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.369118 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.373601 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-xk7gv"] Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.374484 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.402990 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-inventory\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.403111 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.403168 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ceph\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.403236 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj67l\" (UniqueName: \"kubernetes.io/projected/e961c0a2-5b1d-4f14-975f-ec640a801439-kube-api-access-mj67l\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.505989 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-inventory\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.506835 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: 
\"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.506876 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ceph\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.506943 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj67l\" (UniqueName: \"kubernetes.io/projected/e961c0a2-5b1d-4f14-975f-ec640a801439-kube-api-access-mj67l\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.510128 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-inventory\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.510583 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.511399 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ceph\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.538592 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj67l\" (UniqueName: \"kubernetes.io/projected/e961c0a2-5b1d-4f14-975f-ec640a801439-kube-api-access-mj67l\") pod \"configure-network-openstack-openstack-cell1-xk7gv\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:47 crc kubenswrapper[4816]: I0216 15:02:47.691739 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:02:48 crc kubenswrapper[4816]: I0216 15:02:48.252250 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-xk7gv"] Feb 16 15:02:48 crc kubenswrapper[4816]: I0216 15:02:48.266713 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" event={"ID":"e961c0a2-5b1d-4f14-975f-ec640a801439","Type":"ContainerStarted","Data":"2e256818ae3dd74864593e4a2d4efb675a8f2af3928dee91a39d236b2c010346"} Feb 16 15:02:49 crc kubenswrapper[4816]: I0216 15:02:49.310862 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" event={"ID":"e961c0a2-5b1d-4f14-975f-ec640a801439","Type":"ContainerStarted","Data":"723c4a9861a2d783895e205d8365838f17df6721a11eacb1127c2e963357b1ff"} Feb 16 15:02:49 crc kubenswrapper[4816]: I0216 15:02:49.360950 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" podStartSLOduration=1.9264367679999999 podStartE2EDuration="2.360818428s" podCreationTimestamp="2026-02-16 15:02:47 +0000 UTC" firstStartedPulling="2026-02-16 15:02:48.259283693 +0000 UTC m=+7167.585997421" lastFinishedPulling="2026-02-16 15:02:48.693665353 +0000 UTC m=+7168.020379081" observedRunningTime="2026-02-16 15:02:49.340966047 +0000 UTC m=+7168.667679775" watchObservedRunningTime="2026-02-16 15:02:49.360818428 +0000 UTC m=+7168.687532146" Feb 16 15:03:06 crc kubenswrapper[4816]: I0216 15:03:06.941006 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:03:06 crc kubenswrapper[4816]: I0216 15:03:06.942732 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:03:36 crc kubenswrapper[4816]: I0216 15:03:36.941200 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:03:36 crc kubenswrapper[4816]: I0216 15:03:36.941718 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:04:06 crc kubenswrapper[4816]: I0216 15:04:06.940999 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:04:06 crc kubenswrapper[4816]: I0216 15:04:06.941759 4816 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:04:06 crc kubenswrapper[4816]: I0216 15:04:06.941844 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 15:04:06 crc kubenswrapper[4816]: I0216 15:04:06.943768 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b7768d35603acc2ec6f8a3a3978f27ffd1b0d30c9fd3424095a1924ca5f68fd"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 15:04:06 crc kubenswrapper[4816]: I0216 15:04:06.943963 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://7b7768d35603acc2ec6f8a3a3978f27ffd1b0d30c9fd3424095a1924ca5f68fd" gracePeriod=600 Feb 16 15:04:07 crc kubenswrapper[4816]: I0216 15:04:07.152323 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="7b7768d35603acc2ec6f8a3a3978f27ffd1b0d30c9fd3424095a1924ca5f68fd" exitCode=0 Feb 16 15:04:07 crc kubenswrapper[4816]: I0216 15:04:07.152424 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"7b7768d35603acc2ec6f8a3a3978f27ffd1b0d30c9fd3424095a1924ca5f68fd"} Feb 16 15:04:07 crc kubenswrapper[4816]: I0216 15:04:07.152832 4816 scope.go:117] "RemoveContainer" containerID="3917a0c37b8f669de803df9274b9885aca62f8ef73e66691b1306b254d42e8ef" Feb 16 15:04:08 crc kubenswrapper[4816]: I0216 15:04:08.166888 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"} Feb 16 15:04:15 crc kubenswrapper[4816]: I0216 15:04:15.469093 4816 generic.go:334] "Generic (PLEG): container finished" podID="e961c0a2-5b1d-4f14-975f-ec640a801439" containerID="723c4a9861a2d783895e205d8365838f17df6721a11eacb1127c2e963357b1ff" exitCode=0 Feb 16 15:04:15 crc kubenswrapper[4816]: I0216 15:04:15.469297 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" event={"ID":"e961c0a2-5b1d-4f14-975f-ec640a801439","Type":"ContainerDied","Data":"723c4a9861a2d783895e205d8365838f17df6721a11eacb1127c2e963357b1ff"} Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.037945 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.189084 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ssh-key-openstack-cell1\") pod \"e961c0a2-5b1d-4f14-975f-ec640a801439\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.189369 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mj67l\" (UniqueName: \"kubernetes.io/projected/e961c0a2-5b1d-4f14-975f-ec640a801439-kube-api-access-mj67l\") pod \"e961c0a2-5b1d-4f14-975f-ec640a801439\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.189447 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ceph\") pod \"e961c0a2-5b1d-4f14-975f-ec640a801439\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.189482 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-inventory\") pod \"e961c0a2-5b1d-4f14-975f-ec640a801439\" (UID: \"e961c0a2-5b1d-4f14-975f-ec640a801439\") " Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.194837 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ceph" (OuterVolumeSpecName: "ceph") pod "e961c0a2-5b1d-4f14-975f-ec640a801439" (UID: "e961c0a2-5b1d-4f14-975f-ec640a801439"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.199131 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e961c0a2-5b1d-4f14-975f-ec640a801439-kube-api-access-mj67l" (OuterVolumeSpecName: "kube-api-access-mj67l") pod "e961c0a2-5b1d-4f14-975f-ec640a801439" (UID: "e961c0a2-5b1d-4f14-975f-ec640a801439"). InnerVolumeSpecName "kube-api-access-mj67l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.220368 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "e961c0a2-5b1d-4f14-975f-ec640a801439" (UID: "e961c0a2-5b1d-4f14-975f-ec640a801439"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.241895 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-inventory" (OuterVolumeSpecName: "inventory") pod "e961c0a2-5b1d-4f14-975f-ec640a801439" (UID: "e961c0a2-5b1d-4f14-975f-ec640a801439"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.292880 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.292918 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.292931 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e961c0a2-5b1d-4f14-975f-ec640a801439-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.292941 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mj67l\" (UniqueName: \"kubernetes.io/projected/e961c0a2-5b1d-4f14-975f-ec640a801439-kube-api-access-mj67l\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.495529 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" event={"ID":"e961c0a2-5b1d-4f14-975f-ec640a801439","Type":"ContainerDied","Data":"2e256818ae3dd74864593e4a2d4efb675a8f2af3928dee91a39d236b2c010346"} Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.496314 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e256818ae3dd74864593e4a2d4efb675a8f2af3928dee91a39d236b2c010346" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.495587 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-xk7gv" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.612525 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-fbgc6"] Feb 16 15:04:17 crc kubenswrapper[4816]: E0216 15:04:17.613039 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e961c0a2-5b1d-4f14-975f-ec640a801439" containerName="configure-network-openstack-openstack-cell1" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.613063 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e961c0a2-5b1d-4f14-975f-ec640a801439" containerName="configure-network-openstack-openstack-cell1" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.613361 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e961c0a2-5b1d-4f14-975f-ec640a801439" containerName="configure-network-openstack-openstack-cell1" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.614380 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.616013 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.617114 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.617240 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.617364 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.631127 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-fbgc6"] Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.803495 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-inventory\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.803690 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xsn5\" (UniqueName: \"kubernetes.io/projected/6edf3408-6112-4a18-9c9c-2aebc344f6b3-kube-api-access-5xsn5\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.803861 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.804114 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ceph\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.906553 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ceph\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.907113 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-inventory\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " 
pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.907283 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xsn5\" (UniqueName: \"kubernetes.io/projected/6edf3408-6112-4a18-9c9c-2aebc344f6b3-kube-api-access-5xsn5\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.907444 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.911612 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ceph\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.925092 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-inventory\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.925544 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.931281 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xsn5\" (UniqueName: \"kubernetes.io/projected/6edf3408-6112-4a18-9c9c-2aebc344f6b3-kube-api-access-5xsn5\") pod \"validate-network-openstack-openstack-cell1-fbgc6\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:17 crc kubenswrapper[4816]: I0216 15:04:17.944490 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:18 crc kubenswrapper[4816]: I0216 15:04:18.640829 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-fbgc6"] Feb 16 15:04:19 crc kubenswrapper[4816]: I0216 15:04:19.553532 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" event={"ID":"6edf3408-6112-4a18-9c9c-2aebc344f6b3","Type":"ContainerStarted","Data":"90907de12966d581095b2bcf69d0cfbd0d84bd51768e2c5288344af97bafb69c"} Feb 16 15:04:19 crc kubenswrapper[4816]: I0216 15:04:19.553835 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" event={"ID":"6edf3408-6112-4a18-9c9c-2aebc344f6b3","Type":"ContainerStarted","Data":"18d13b7374a5d12706e3d6274b4ca4a905dc363cdcf8f432e28f53f893e67ffc"} Feb 16 15:04:19 crc kubenswrapper[4816]: I0216 15:04:19.569687 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" podStartSLOduration=2.04405747 podStartE2EDuration="2.569643717s" podCreationTimestamp="2026-02-16 15:04:17 +0000 UTC" firstStartedPulling="2026-02-16 15:04:18.641125957 +0000 UTC m=+7257.967839685" lastFinishedPulling="2026-02-16 15:04:19.166712204 +0000 UTC m=+7258.493425932" observedRunningTime="2026-02-16 15:04:19.567985332 +0000 UTC m=+7258.894699080" watchObservedRunningTime="2026-02-16 15:04:19.569643717 +0000 UTC m=+7258.896357445" Feb 16 15:04:24 crc kubenswrapper[4816]: I0216 15:04:24.883135 4816 generic.go:334] "Generic (PLEG): container finished" podID="6edf3408-6112-4a18-9c9c-2aebc344f6b3" containerID="90907de12966d581095b2bcf69d0cfbd0d84bd51768e2c5288344af97bafb69c" exitCode=0 Feb 16 15:04:24 crc kubenswrapper[4816]: I0216 15:04:24.883602 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" event={"ID":"6edf3408-6112-4a18-9c9c-2aebc344f6b3","Type":"ContainerDied","Data":"90907de12966d581095b2bcf69d0cfbd0d84bd51768e2c5288344af97bafb69c"} Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.379292 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.553007 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ssh-key-openstack-cell1\") pod \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.553348 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ceph\") pod \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.553380 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-inventory\") pod \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.553519 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xsn5\" (UniqueName: \"kubernetes.io/projected/6edf3408-6112-4a18-9c9c-2aebc344f6b3-kube-api-access-5xsn5\") pod \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\" (UID: \"6edf3408-6112-4a18-9c9c-2aebc344f6b3\") " Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.568580 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ceph" (OuterVolumeSpecName: "ceph") pod "6edf3408-6112-4a18-9c9c-2aebc344f6b3" (UID: "6edf3408-6112-4a18-9c9c-2aebc344f6b3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.570065 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6edf3408-6112-4a18-9c9c-2aebc344f6b3-kube-api-access-5xsn5" (OuterVolumeSpecName: "kube-api-access-5xsn5") pod "6edf3408-6112-4a18-9c9c-2aebc344f6b3" (UID: "6edf3408-6112-4a18-9c9c-2aebc344f6b3"). InnerVolumeSpecName "kube-api-access-5xsn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.586460 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-inventory" (OuterVolumeSpecName: "inventory") pod "6edf3408-6112-4a18-9c9c-2aebc344f6b3" (UID: "6edf3408-6112-4a18-9c9c-2aebc344f6b3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.595705 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "6edf3408-6112-4a18-9c9c-2aebc344f6b3" (UID: "6edf3408-6112-4a18-9c9c-2aebc344f6b3"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.758103 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xsn5\" (UniqueName: \"kubernetes.io/projected/6edf3408-6112-4a18-9c9c-2aebc344f6b3-kube-api-access-5xsn5\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.758156 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.758172 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.758187 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6edf3408-6112-4a18-9c9c-2aebc344f6b3-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.902783 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" event={"ID":"6edf3408-6112-4a18-9c9c-2aebc344f6b3","Type":"ContainerDied","Data":"18d13b7374a5d12706e3d6274b4ca4a905dc363cdcf8f432e28f53f893e67ffc"} Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.902844 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18d13b7374a5d12706e3d6274b4ca4a905dc363cdcf8f432e28f53f893e67ffc" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.902945 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-fbgc6" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.969121 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-24tb8"] Feb 16 15:04:26 crc kubenswrapper[4816]: E0216 15:04:26.969562 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6edf3408-6112-4a18-9c9c-2aebc344f6b3" containerName="validate-network-openstack-openstack-cell1" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.969589 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6edf3408-6112-4a18-9c9c-2aebc344f6b3" containerName="validate-network-openstack-openstack-cell1" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.969824 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6edf3408-6112-4a18-9c9c-2aebc344f6b3" containerName="validate-network-openstack-openstack-cell1" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.970675 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.972548 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.975673 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.975906 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.976101 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:04:26 crc kubenswrapper[4816]: I0216 15:04:26.984122 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-24tb8"] Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.168172 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ceph\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.168240 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52tz7\" (UniqueName: \"kubernetes.io/projected/78140e20-6208-46cc-a7a5-a64aa3d1dee2-kube-api-access-52tz7\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.168347 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-inventory\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.168818 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.269878 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.270265 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ceph\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc 
kubenswrapper[4816]: I0216 15:04:27.270295 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52tz7\" (UniqueName: \"kubernetes.io/projected/78140e20-6208-46cc-a7a5-a64aa3d1dee2-kube-api-access-52tz7\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.270357 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-inventory\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.274995 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.275049 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ceph\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.276169 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-inventory\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.293977 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52tz7\" (UniqueName: \"kubernetes.io/projected/78140e20-6208-46cc-a7a5-a64aa3d1dee2-kube-api-access-52tz7\") pod \"install-os-openstack-openstack-cell1-24tb8\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:27 crc kubenswrapper[4816]: I0216 15:04:27.593467 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:04:28 crc kubenswrapper[4816]: I0216 15:04:28.130176 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-24tb8"] Feb 16 15:04:28 crc kubenswrapper[4816]: W0216 15:04:28.130799 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78140e20_6208_46cc_a7a5_a64aa3d1dee2.slice/crio-c8c4f6d5d04082bee007de26bcc79ccda281442bdf45ffc8fc96dbbfc6ff33bd WatchSource:0}: Error finding container c8c4f6d5d04082bee007de26bcc79ccda281442bdf45ffc8fc96dbbfc6ff33bd: Status 404 returned error can't find the container with id c8c4f6d5d04082bee007de26bcc79ccda281442bdf45ffc8fc96dbbfc6ff33bd Feb 16 15:04:28 crc kubenswrapper[4816]: I0216 15:04:28.927495 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-24tb8" event={"ID":"78140e20-6208-46cc-a7a5-a64aa3d1dee2","Type":"ContainerStarted","Data":"71f2e7733020ef55548fd0dc11d31bffc8fea4bd88208d1875573832dd2e8936"} Feb 16 15:04:28 crc kubenswrapper[4816]: I0216 15:04:28.928293 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-24tb8" event={"ID":"78140e20-6208-46cc-a7a5-a64aa3d1dee2","Type":"ContainerStarted","Data":"c8c4f6d5d04082bee007de26bcc79ccda281442bdf45ffc8fc96dbbfc6ff33bd"} Feb 16 15:04:28 crc kubenswrapper[4816]: I0216 15:04:28.944199 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-24tb8" podStartSLOduration=2.553816747 podStartE2EDuration="2.944156786s" podCreationTimestamp="2026-02-16 15:04:26 +0000 UTC" firstStartedPulling="2026-02-16 15:04:28.133532201 +0000 UTC m=+7267.460245929" lastFinishedPulling="2026-02-16 15:04:28.52387224 +0000 UTC m=+7267.850585968" observedRunningTime="2026-02-16 15:04:28.942951684 +0000 UTC m=+7268.269665412" watchObservedRunningTime="2026-02-16 15:04:28.944156786 +0000 UTC m=+7268.270870514" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.565442 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7glzr"] Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.570172 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.580114 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7glzr"] Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.660865 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6lwk\" (UniqueName: \"kubernetes.io/projected/925a6f8a-b866-45c3-8059-f3dfcfc7510f-kube-api-access-f6lwk\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.661008 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-catalog-content\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.661120 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-utilities\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.762878 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6lwk\" (UniqueName: \"kubernetes.io/projected/925a6f8a-b866-45c3-8059-f3dfcfc7510f-kube-api-access-f6lwk\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.762966 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-catalog-content\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.763067 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-utilities\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.763622 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-catalog-content\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.763716 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-utilities\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.783782 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f6lwk\" (UniqueName: \"kubernetes.io/projected/925a6f8a-b866-45c3-8059-f3dfcfc7510f-kube-api-access-f6lwk\") pod \"community-operators-7glzr\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:10 crc kubenswrapper[4816]: I0216 15:05:10.903587 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:11 crc kubenswrapper[4816]: I0216 15:05:11.415002 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7glzr"] Feb 16 15:05:12 crc kubenswrapper[4816]: I0216 15:05:12.422523 4816 generic.go:334] "Generic (PLEG): container finished" podID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerID="c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c" exitCode=0 Feb 16 15:05:12 crc kubenswrapper[4816]: I0216 15:05:12.422986 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7glzr" event={"ID":"925a6f8a-b866-45c3-8059-f3dfcfc7510f","Type":"ContainerDied","Data":"c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c"} Feb 16 15:05:12 crc kubenswrapper[4816]: I0216 15:05:12.423341 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7glzr" event={"ID":"925a6f8a-b866-45c3-8059-f3dfcfc7510f","Type":"ContainerStarted","Data":"9ff3e102b510be3b44e8ac6433574df9f5cfac43dd237581f4f55061608f9482"} Feb 16 15:05:12 crc kubenswrapper[4816]: I0216 15:05:12.426082 4816 generic.go:334] "Generic (PLEG): container finished" podID="78140e20-6208-46cc-a7a5-a64aa3d1dee2" containerID="71f2e7733020ef55548fd0dc11d31bffc8fea4bd88208d1875573832dd2e8936" exitCode=0 Feb 16 15:05:12 crc kubenswrapper[4816]: I0216 15:05:12.426146 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-24tb8" event={"ID":"78140e20-6208-46cc-a7a5-a64aa3d1dee2","Type":"ContainerDied","Data":"71f2e7733020ef55548fd0dc11d31bffc8fea4bd88208d1875573832dd2e8936"} Feb 16 15:05:13 crc kubenswrapper[4816]: I0216 15:05:13.445627 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7glzr" event={"ID":"925a6f8a-b866-45c3-8059-f3dfcfc7510f","Type":"ContainerStarted","Data":"aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c"} Feb 16 15:05:13 crc kubenswrapper[4816]: I0216 15:05:13.869825 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.035147 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ssh-key-openstack-cell1\") pod \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.035246 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ceph\") pod \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.035455 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52tz7\" (UniqueName: \"kubernetes.io/projected/78140e20-6208-46cc-a7a5-a64aa3d1dee2-kube-api-access-52tz7\") pod \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.035527 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-inventory\") pod \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\" (UID: \"78140e20-6208-46cc-a7a5-a64aa3d1dee2\") " Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.044507 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ceph" (OuterVolumeSpecName: "ceph") pod "78140e20-6208-46cc-a7a5-a64aa3d1dee2" (UID: "78140e20-6208-46cc-a7a5-a64aa3d1dee2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.044592 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78140e20-6208-46cc-a7a5-a64aa3d1dee2-kube-api-access-52tz7" (OuterVolumeSpecName: "kube-api-access-52tz7") pod "78140e20-6208-46cc-a7a5-a64aa3d1dee2" (UID: "78140e20-6208-46cc-a7a5-a64aa3d1dee2"). InnerVolumeSpecName "kube-api-access-52tz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.064955 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-inventory" (OuterVolumeSpecName: "inventory") pod "78140e20-6208-46cc-a7a5-a64aa3d1dee2" (UID: "78140e20-6208-46cc-a7a5-a64aa3d1dee2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.071683 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "78140e20-6208-46cc-a7a5-a64aa3d1dee2" (UID: "78140e20-6208-46cc-a7a5-a64aa3d1dee2"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.138183 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52tz7\" (UniqueName: \"kubernetes.io/projected/78140e20-6208-46cc-a7a5-a64aa3d1dee2-kube-api-access-52tz7\") on node \"crc\" DevicePath \"\"" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.138213 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.138222 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.138230 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78140e20-6208-46cc-a7a5-a64aa3d1dee2-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.460994 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-24tb8" event={"ID":"78140e20-6208-46cc-a7a5-a64aa3d1dee2","Type":"ContainerDied","Data":"c8c4f6d5d04082bee007de26bcc79ccda281442bdf45ffc8fc96dbbfc6ff33bd"} Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.461368 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8c4f6d5d04082bee007de26bcc79ccda281442bdf45ffc8fc96dbbfc6ff33bd" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.461015 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-24tb8" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.580399 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-vwcgn"] Feb 16 15:05:14 crc kubenswrapper[4816]: E0216 15:05:14.580983 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78140e20-6208-46cc-a7a5-a64aa3d1dee2" containerName="install-os-openstack-openstack-cell1" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.581009 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78140e20-6208-46cc-a7a5-a64aa3d1dee2" containerName="install-os-openstack-openstack-cell1" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.581327 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="78140e20-6208-46cc-a7a5-a64aa3d1dee2" containerName="install-os-openstack-openstack-cell1" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.582421 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.584786 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.584871 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.585159 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.585175 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.593757 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-vwcgn"] Feb 16 15:05:14 crc kubenswrapper[4816]: E0216 15:05:14.666990 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod925a6f8a_b866_45c3_8059_f3dfcfc7510f.slice/crio-aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c.scope\": RecentStats: unable to find data in memory cache]" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.751145 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ceph\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.751363 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-inventory\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.751408 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvrqj\" (UniqueName: \"kubernetes.io/projected/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-kube-api-access-xvrqj\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.751444 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.853581 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-inventory\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 
crc kubenswrapper[4816]: I0216 15:05:14.853673 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvrqj\" (UniqueName: \"kubernetes.io/projected/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-kube-api-access-xvrqj\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.853729 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.853805 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ceph\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.858749 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-inventory\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.858749 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.859539 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ceph\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.872348 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvrqj\" (UniqueName: \"kubernetes.io/projected/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-kube-api-access-xvrqj\") pod \"configure-os-openstack-openstack-cell1-vwcgn\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:14 crc kubenswrapper[4816]: I0216 15:05:14.916713 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:05:15 crc kubenswrapper[4816]: I0216 15:05:15.484119 4816 generic.go:334] "Generic (PLEG): container finished" podID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerID="aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c" exitCode=0 Feb 16 15:05:15 crc kubenswrapper[4816]: I0216 15:05:15.484632 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7glzr" event={"ID":"925a6f8a-b866-45c3-8059-f3dfcfc7510f","Type":"ContainerDied","Data":"aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c"} Feb 16 15:05:15 crc kubenswrapper[4816]: I0216 15:05:15.486094 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-vwcgn"] Feb 16 15:05:15 crc kubenswrapper[4816]: W0216 15:05:15.490688 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34da2db9_996b_4f0a_af6b_3b230f5b8a0a.slice/crio-1def86a77813bca7f49143ca8c9aa3e84bfe7e94d05f6a0e9125b6c5b8ed7ec3 WatchSource:0}: Error finding container 1def86a77813bca7f49143ca8c9aa3e84bfe7e94d05f6a0e9125b6c5b8ed7ec3: Status 404 returned error can't find the container with id 1def86a77813bca7f49143ca8c9aa3e84bfe7e94d05f6a0e9125b6c5b8ed7ec3 Feb 16 15:05:16 crc kubenswrapper[4816]: I0216 15:05:16.496412 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" event={"ID":"34da2db9-996b-4f0a-af6b-3b230f5b8a0a","Type":"ContainerStarted","Data":"94da798d212cc3e059615f8b1ecbeb6cb32872f4841f4bc8b80627171b7fdc8b"} Feb 16 15:05:16 crc kubenswrapper[4816]: I0216 15:05:16.497395 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" event={"ID":"34da2db9-996b-4f0a-af6b-3b230f5b8a0a","Type":"ContainerStarted","Data":"1def86a77813bca7f49143ca8c9aa3e84bfe7e94d05f6a0e9125b6c5b8ed7ec3"} Feb 16 15:05:16 crc kubenswrapper[4816]: I0216 15:05:16.502737 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7glzr" event={"ID":"925a6f8a-b866-45c3-8059-f3dfcfc7510f","Type":"ContainerStarted","Data":"471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26"} Feb 16 15:05:16 crc kubenswrapper[4816]: I0216 15:05:16.514960 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" podStartSLOduration=2.033513449 podStartE2EDuration="2.514939751s" podCreationTimestamp="2026-02-16 15:05:14 +0000 UTC" firstStartedPulling="2026-02-16 15:05:15.496185773 +0000 UTC m=+7314.822899501" lastFinishedPulling="2026-02-16 15:05:15.977612075 +0000 UTC m=+7315.304325803" observedRunningTime="2026-02-16 15:05:16.514214621 +0000 UTC m=+7315.840928359" watchObservedRunningTime="2026-02-16 15:05:16.514939751 +0000 UTC m=+7315.841653479" Feb 16 15:05:16 crc kubenswrapper[4816]: I0216 15:05:16.542521 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7glzr" podStartSLOduration=3.058063244 podStartE2EDuration="6.542498852s" podCreationTimestamp="2026-02-16 15:05:10 +0000 UTC" firstStartedPulling="2026-02-16 15:05:12.424797343 +0000 UTC m=+7311.751511071" lastFinishedPulling="2026-02-16 15:05:15.909232941 +0000 UTC m=+7315.235946679" observedRunningTime="2026-02-16 15:05:16.539809519 +0000 UTC 
m=+7315.866523317" watchObservedRunningTime="2026-02-16 15:05:16.542498852 +0000 UTC m=+7315.869212580" Feb 16 15:05:20 crc kubenswrapper[4816]: I0216 15:05:20.904773 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:20 crc kubenswrapper[4816]: I0216 15:05:20.905550 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:21 crc kubenswrapper[4816]: I0216 15:05:21.949321 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-7glzr" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="registry-server" probeResult="failure" output=< Feb 16 15:05:21 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:05:21 crc kubenswrapper[4816]: > Feb 16 15:05:30 crc kubenswrapper[4816]: I0216 15:05:30.952013 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:31 crc kubenswrapper[4816]: I0216 15:05:31.018138 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:31 crc kubenswrapper[4816]: I0216 15:05:31.188630 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7glzr"] Feb 16 15:05:32 crc kubenswrapper[4816]: I0216 15:05:32.654432 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7glzr" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="registry-server" containerID="cri-o://471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26" gracePeriod=2 Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.341589 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.478509 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-catalog-content\") pod \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.478754 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-utilities\") pod \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.478787 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6lwk\" (UniqueName: \"kubernetes.io/projected/925a6f8a-b866-45c3-8059-f3dfcfc7510f-kube-api-access-f6lwk\") pod \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\" (UID: \"925a6f8a-b866-45c3-8059-f3dfcfc7510f\") " Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.479672 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-utilities" (OuterVolumeSpecName: "utilities") pod "925a6f8a-b866-45c3-8059-f3dfcfc7510f" (UID: "925a6f8a-b866-45c3-8059-f3dfcfc7510f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.494210 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925a6f8a-b866-45c3-8059-f3dfcfc7510f-kube-api-access-f6lwk" (OuterVolumeSpecName: "kube-api-access-f6lwk") pod "925a6f8a-b866-45c3-8059-f3dfcfc7510f" (UID: "925a6f8a-b866-45c3-8059-f3dfcfc7510f"). InnerVolumeSpecName "kube-api-access-f6lwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.529314 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "925a6f8a-b866-45c3-8059-f3dfcfc7510f" (UID: "925a6f8a-b866-45c3-8059-f3dfcfc7510f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.581383 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.581418 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6lwk\" (UniqueName: \"kubernetes.io/projected/925a6f8a-b866-45c3-8059-f3dfcfc7510f-kube-api-access-f6lwk\") on node \"crc\" DevicePath \"\"" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.581429 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/925a6f8a-b866-45c3-8059-f3dfcfc7510f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.665014 4816 generic.go:334] "Generic (PLEG): container finished" podID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerID="471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26" exitCode=0 Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.665082 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7glzr" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.665072 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7glzr" event={"ID":"925a6f8a-b866-45c3-8059-f3dfcfc7510f","Type":"ContainerDied","Data":"471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26"} Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.665473 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7glzr" event={"ID":"925a6f8a-b866-45c3-8059-f3dfcfc7510f","Type":"ContainerDied","Data":"9ff3e102b510be3b44e8ac6433574df9f5cfac43dd237581f4f55061608f9482"} Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.665503 4816 scope.go:117] "RemoveContainer" containerID="471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.688352 4816 scope.go:117] "RemoveContainer" containerID="aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.721878 4816 scope.go:117] "RemoveContainer" containerID="c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.745338 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7glzr"] Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.764368 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7glzr"] Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.765641 4816 scope.go:117] "RemoveContainer" containerID="471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26" Feb 16 15:05:33 crc kubenswrapper[4816]: E0216 15:05:33.766227 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26\": container with ID starting with 471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26 not found: ID does not exist" containerID="471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.766283 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26"} err="failed to get container status \"471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26\": rpc error: code = NotFound desc = could not find container \"471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26\": container with ID starting with 471e9dc9653c4bcf4bfe5e0eab5d6813a5471460311948db0e1a6ea19243da26 not found: ID does not exist" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.766320 4816 scope.go:117] "RemoveContainer" containerID="aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c" Feb 16 15:05:33 crc kubenswrapper[4816]: E0216 15:05:33.766758 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c\": container with ID starting with aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c not found: ID does not exist" containerID="aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.766787 4816 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c"} err="failed to get container status \"aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c\": rpc error: code = NotFound desc = could not find container \"aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c\": container with ID starting with aa179e40a9e22c6c8a9cd82e9d3558ceeb55e395c58a964da21b568b51dccb5c not found: ID does not exist" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.766808 4816 scope.go:117] "RemoveContainer" containerID="c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c" Feb 16 15:05:33 crc kubenswrapper[4816]: E0216 15:05:33.767088 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c\": container with ID starting with c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c not found: ID does not exist" containerID="c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c" Feb 16 15:05:33 crc kubenswrapper[4816]: I0216 15:05:33.767123 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c"} err="failed to get container status \"c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c\": rpc error: code = NotFound desc = could not find container \"c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c\": container with ID starting with c32d1a94c34aa12bfd1da2c714cdaa4421f00bf815e322573011182df035894c not found: ID does not exist" Feb 16 15:05:35 crc kubenswrapper[4816]: I0216 15:05:35.416211 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" path="/var/lib/kubelet/pods/925a6f8a-b866-45c3-8059-f3dfcfc7510f/volumes" Feb 16 15:06:00 crc kubenswrapper[4816]: I0216 15:06:00.959830 4816 generic.go:334] "Generic (PLEG): container finished" podID="34da2db9-996b-4f0a-af6b-3b230f5b8a0a" containerID="94da798d212cc3e059615f8b1ecbeb6cb32872f4841f4bc8b80627171b7fdc8b" exitCode=0 Feb 16 15:06:00 crc kubenswrapper[4816]: I0216 15:06:00.959949 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" event={"ID":"34da2db9-996b-4f0a-af6b-3b230f5b8a0a","Type":"ContainerDied","Data":"94da798d212cc3e059615f8b1ecbeb6cb32872f4841f4bc8b80627171b7fdc8b"} Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.513005 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.593510 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ceph\") pod \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.593721 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvrqj\" (UniqueName: \"kubernetes.io/projected/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-kube-api-access-xvrqj\") pod \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.593773 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-inventory\") pod \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.594084 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ssh-key-openstack-cell1\") pod \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\" (UID: \"34da2db9-996b-4f0a-af6b-3b230f5b8a0a\") " Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.599301 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-kube-api-access-xvrqj" (OuterVolumeSpecName: "kube-api-access-xvrqj") pod "34da2db9-996b-4f0a-af6b-3b230f5b8a0a" (UID: "34da2db9-996b-4f0a-af6b-3b230f5b8a0a"). InnerVolumeSpecName "kube-api-access-xvrqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.599648 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ceph" (OuterVolumeSpecName: "ceph") pod "34da2db9-996b-4f0a-af6b-3b230f5b8a0a" (UID: "34da2db9-996b-4f0a-af6b-3b230f5b8a0a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.624026 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "34da2db9-996b-4f0a-af6b-3b230f5b8a0a" (UID: "34da2db9-996b-4f0a-af6b-3b230f5b8a0a"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.624162 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-inventory" (OuterVolumeSpecName: "inventory") pod "34da2db9-996b-4f0a-af6b-3b230f5b8a0a" (UID: "34da2db9-996b-4f0a-af6b-3b230f5b8a0a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.696706 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.696743 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.696755 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvrqj\" (UniqueName: \"kubernetes.io/projected/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-kube-api-access-xvrqj\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.696765 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34da2db9-996b-4f0a-af6b-3b230f5b8a0a-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.978322 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" event={"ID":"34da2db9-996b-4f0a-af6b-3b230f5b8a0a","Type":"ContainerDied","Data":"1def86a77813bca7f49143ca8c9aa3e84bfe7e94d05f6a0e9125b6c5b8ed7ec3"} Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.978369 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1def86a77813bca7f49143ca8c9aa3e84bfe7e94d05f6a0e9125b6c5b8ed7ec3" Feb 16 15:06:02 crc kubenswrapper[4816]: I0216 15:06:02.978421 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-vwcgn" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.086218 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-wx6jl"] Feb 16 15:06:03 crc kubenswrapper[4816]: E0216 15:06:03.086673 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="extract-utilities" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.086691 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="extract-utilities" Feb 16 15:06:03 crc kubenswrapper[4816]: E0216 15:06:03.086706 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="registry-server" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.086723 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="registry-server" Feb 16 15:06:03 crc kubenswrapper[4816]: E0216 15:06:03.086758 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34da2db9-996b-4f0a-af6b-3b230f5b8a0a" containerName="configure-os-openstack-openstack-cell1" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.086765 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="34da2db9-996b-4f0a-af6b-3b230f5b8a0a" containerName="configure-os-openstack-openstack-cell1" Feb 16 15:06:03 crc kubenswrapper[4816]: E0216 15:06:03.086779 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="extract-content" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.086786 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="extract-content" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.086984 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="925a6f8a-b866-45c3-8059-f3dfcfc7510f" containerName="registry-server" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.087007 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="34da2db9-996b-4f0a-af6b-3b230f5b8a0a" containerName="configure-os-openstack-openstack-cell1" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.087808 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.093996 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.094284 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.094744 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.094924 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.099306 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-wx6jl"] Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.220791 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.221240 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-inventory-0\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.221485 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ceph\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.221616 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46g29\" (UniqueName: \"kubernetes.io/projected/0ecb519a-94e1-4adb-8356-c9836c9673ab-kube-api-access-46g29\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.323915 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ceph\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.323981 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46g29\" (UniqueName: \"kubernetes.io/projected/0ecb519a-94e1-4adb-8356-c9836c9673ab-kube-api-access-46g29\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.324087 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: 
\"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.324142 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-inventory-0\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.330218 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-inventory-0\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.330545 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.331455 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ceph\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.342647 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46g29\" (UniqueName: \"kubernetes.io/projected/0ecb519a-94e1-4adb-8356-c9836c9673ab-kube-api-access-46g29\") pod \"ssh-known-hosts-openstack-wx6jl\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:03 crc kubenswrapper[4816]: I0216 15:06:03.406311 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:04 crc kubenswrapper[4816]: I0216 15:06:04.018798 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-wx6jl"] Feb 16 15:06:04 crc kubenswrapper[4816]: W0216 15:06:04.023729 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ecb519a_94e1_4adb_8356_c9836c9673ab.slice/crio-a790d5bf5bfcc55a4500ed6c4ab532649cbf855e57b2b1f2c3831796364e734e WatchSource:0}: Error finding container a790d5bf5bfcc55a4500ed6c4ab532649cbf855e57b2b1f2c3831796364e734e: Status 404 returned error can't find the container with id a790d5bf5bfcc55a4500ed6c4ab532649cbf855e57b2b1f2c3831796364e734e Feb 16 15:06:05 crc kubenswrapper[4816]: I0216 15:06:05.013730 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-wx6jl" event={"ID":"0ecb519a-94e1-4adb-8356-c9836c9673ab","Type":"ContainerStarted","Data":"7e7c87c478f1e17c2347f1f221477d25dd6fd1b251369b0ad603d35986db83aa"} Feb 16 15:06:05 crc kubenswrapper[4816]: I0216 15:06:05.014029 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-wx6jl" event={"ID":"0ecb519a-94e1-4adb-8356-c9836c9673ab","Type":"ContainerStarted","Data":"a790d5bf5bfcc55a4500ed6c4ab532649cbf855e57b2b1f2c3831796364e734e"} Feb 16 15:06:13 crc kubenswrapper[4816]: I0216 15:06:13.103758 4816 generic.go:334] "Generic (PLEG): container finished" podID="0ecb519a-94e1-4adb-8356-c9836c9673ab" containerID="7e7c87c478f1e17c2347f1f221477d25dd6fd1b251369b0ad603d35986db83aa" exitCode=0 Feb 16 15:06:13 crc kubenswrapper[4816]: I0216 15:06:13.103870 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-wx6jl" event={"ID":"0ecb519a-94e1-4adb-8356-c9836c9673ab","Type":"ContainerDied","Data":"7e7c87c478f1e17c2347f1f221477d25dd6fd1b251369b0ad603d35986db83aa"} Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.586062 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.678377 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ceph\") pod \"0ecb519a-94e1-4adb-8356-c9836c9673ab\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.678427 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-inventory-0\") pod \"0ecb519a-94e1-4adb-8356-c9836c9673ab\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.678546 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46g29\" (UniqueName: \"kubernetes.io/projected/0ecb519a-94e1-4adb-8356-c9836c9673ab-kube-api-access-46g29\") pod \"0ecb519a-94e1-4adb-8356-c9836c9673ab\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.678583 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ssh-key-openstack-cell1\") pod \"0ecb519a-94e1-4adb-8356-c9836c9673ab\" (UID: \"0ecb519a-94e1-4adb-8356-c9836c9673ab\") " Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.685873 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ecb519a-94e1-4adb-8356-c9836c9673ab-kube-api-access-46g29" (OuterVolumeSpecName: "kube-api-access-46g29") pod "0ecb519a-94e1-4adb-8356-c9836c9673ab" (UID: "0ecb519a-94e1-4adb-8356-c9836c9673ab"). InnerVolumeSpecName "kube-api-access-46g29". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.688782 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ceph" (OuterVolumeSpecName: "ceph") pod "0ecb519a-94e1-4adb-8356-c9836c9673ab" (UID: "0ecb519a-94e1-4adb-8356-c9836c9673ab"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.714922 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "0ecb519a-94e1-4adb-8356-c9836c9673ab" (UID: "0ecb519a-94e1-4adb-8356-c9836c9673ab"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.715867 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0ecb519a-94e1-4adb-8356-c9836c9673ab" (UID: "0ecb519a-94e1-4adb-8356-c9836c9673ab"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.781093 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.781127 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.781139 4816 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0ecb519a-94e1-4adb-8356-c9836c9673ab-inventory-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:14 crc kubenswrapper[4816]: I0216 15:06:14.781148 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46g29\" (UniqueName: \"kubernetes.io/projected/0ecb519a-94e1-4adb-8356-c9836c9673ab-kube-api-access-46g29\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.127464 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-wx6jl" event={"ID":"0ecb519a-94e1-4adb-8356-c9836c9673ab","Type":"ContainerDied","Data":"a790d5bf5bfcc55a4500ed6c4ab532649cbf855e57b2b1f2c3831796364e734e"} Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.127842 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a790d5bf5bfcc55a4500ed6c4ab532649cbf855e57b2b1f2c3831796364e734e" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.127527 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-wx6jl" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.232328 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-q4txh"] Feb 16 15:06:15 crc kubenswrapper[4816]: E0216 15:06:15.232814 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecb519a-94e1-4adb-8356-c9836c9673ab" containerName="ssh-known-hosts-openstack" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.232831 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecb519a-94e1-4adb-8356-c9836c9673ab" containerName="ssh-known-hosts-openstack" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.233080 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ecb519a-94e1-4adb-8356-c9836c9673ab" containerName="ssh-known-hosts-openstack" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.233889 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.239532 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.239537 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.239842 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.240006 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.258114 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-q4txh"] Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.394472 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-inventory\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.394553 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74ncg\" (UniqueName: \"kubernetes.io/projected/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-kube-api-access-74ncg\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.394962 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.395029 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ceph\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.496936 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.497328 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ceph\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.497459 4816 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-inventory\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.497686 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74ncg\" (UniqueName: \"kubernetes.io/projected/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-kube-api-access-74ncg\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.504329 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ceph\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.518231 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-inventory\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.520644 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.521479 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74ncg\" (UniqueName: \"kubernetes.io/projected/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-kube-api-access-74ncg\") pod \"run-os-openstack-openstack-cell1-q4txh\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:15 crc kubenswrapper[4816]: I0216 15:06:15.561116 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:16 crc kubenswrapper[4816]: I0216 15:06:16.173346 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-q4txh"] Feb 16 15:06:16 crc kubenswrapper[4816]: I0216 15:06:16.177754 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 15:06:17 crc kubenswrapper[4816]: I0216 15:06:17.182802 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-q4txh" event={"ID":"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10","Type":"ContainerStarted","Data":"90eb38de373c81f514dddc456dfdd3f0430c9329ff544168c5514cac04aece7c"} Feb 16 15:06:17 crc kubenswrapper[4816]: I0216 15:06:17.183644 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-q4txh" event={"ID":"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10","Type":"ContainerStarted","Data":"ff8734ef20c789e76075574294091089af5bc9348fb6dbb8c58158fe977359a0"} Feb 16 15:06:17 crc kubenswrapper[4816]: I0216 15:06:17.211612 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-q4txh" podStartSLOduration=1.686386307 podStartE2EDuration="2.211576951s" podCreationTimestamp="2026-02-16 15:06:15 +0000 UTC" firstStartedPulling="2026-02-16 15:06:16.177416773 +0000 UTC m=+7375.504130491" lastFinishedPulling="2026-02-16 15:06:16.702607397 +0000 UTC m=+7376.029321135" observedRunningTime="2026-02-16 15:06:17.200254843 +0000 UTC m=+7376.526968571" watchObservedRunningTime="2026-02-16 15:06:17.211576951 +0000 UTC m=+7376.538290709" Feb 16 15:06:25 crc kubenswrapper[4816]: I0216 15:06:25.281181 4816 generic.go:334] "Generic (PLEG): container finished" podID="6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" containerID="90eb38de373c81f514dddc456dfdd3f0430c9329ff544168c5514cac04aece7c" exitCode=0 Feb 16 15:06:25 crc kubenswrapper[4816]: I0216 15:06:25.281300 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-q4txh" event={"ID":"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10","Type":"ContainerDied","Data":"90eb38de373c81f514dddc456dfdd3f0430c9329ff544168c5514cac04aece7c"} Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.789994 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.880090 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ceph\") pod \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.880164 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74ncg\" (UniqueName: \"kubernetes.io/projected/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-kube-api-access-74ncg\") pod \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.880323 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-inventory\") pod \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.880350 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ssh-key-openstack-cell1\") pod \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\" (UID: \"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10\") " Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.886363 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-kube-api-access-74ncg" (OuterVolumeSpecName: "kube-api-access-74ncg") pod "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" (UID: "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10"). InnerVolumeSpecName "kube-api-access-74ncg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.889913 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ceph" (OuterVolumeSpecName: "ceph") pod "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" (UID: "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.936475 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-inventory" (OuterVolumeSpecName: "inventory") pod "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" (UID: "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.938037 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" (UID: "6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.984227 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.984291 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74ncg\" (UniqueName: \"kubernetes.io/projected/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-kube-api-access-74ncg\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.984322 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:26 crc kubenswrapper[4816]: I0216 15:06:26.984348 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.305243 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-q4txh" event={"ID":"6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10","Type":"ContainerDied","Data":"ff8734ef20c789e76075574294091089af5bc9348fb6dbb8c58158fe977359a0"} Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.305287 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff8734ef20c789e76075574294091089af5bc9348fb6dbb8c58158fe977359a0" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.305297 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-q4txh" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.383372 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-mgsws"] Feb 16 15:06:27 crc kubenswrapper[4816]: E0216 15:06:27.383892 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" containerName="run-os-openstack-openstack-cell1" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.383909 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" containerName="run-os-openstack-openstack-cell1" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.384118 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10" containerName="run-os-openstack-openstack-cell1" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.384846 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.387231 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.387341 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.387645 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.388976 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.396364 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-mgsws"] Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.493615 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-inventory\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.493705 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.493757 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ceph\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.493786 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5kqf\" (UniqueName: \"kubernetes.io/projected/2abfbb74-d7a6-46b1-937f-4a9a1f882215-kube-api-access-w5kqf\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.595782 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-inventory\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.595830 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc 
kubenswrapper[4816]: I0216 15:06:27.595883 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ceph\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.595912 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5kqf\" (UniqueName: \"kubernetes.io/projected/2abfbb74-d7a6-46b1-937f-4a9a1f882215-kube-api-access-w5kqf\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.599384 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-inventory\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.600072 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.600221 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ceph\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.612853 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5kqf\" (UniqueName: \"kubernetes.io/projected/2abfbb74-d7a6-46b1-937f-4a9a1f882215-kube-api-access-w5kqf\") pod \"reboot-os-openstack-openstack-cell1-mgsws\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:27 crc kubenswrapper[4816]: I0216 15:06:27.702544 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:28 crc kubenswrapper[4816]: I0216 15:06:28.329879 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-mgsws"] Feb 16 15:06:29 crc kubenswrapper[4816]: I0216 15:06:29.326099 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" event={"ID":"2abfbb74-d7a6-46b1-937f-4a9a1f882215","Type":"ContainerStarted","Data":"1cf1f9191b85a46afff560acdefb983b1d86497b614dd79602ffd01c898406e0"} Feb 16 15:06:29 crc kubenswrapper[4816]: I0216 15:06:29.326773 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" event={"ID":"2abfbb74-d7a6-46b1-937f-4a9a1f882215","Type":"ContainerStarted","Data":"2fa14b57754ce6cff08f932d5e6c398747a2aa34e5899bff96212cbb24e78424"} Feb 16 15:06:29 crc kubenswrapper[4816]: I0216 15:06:29.355473 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" podStartSLOduration=1.859950502 podStartE2EDuration="2.355453008s" podCreationTimestamp="2026-02-16 15:06:27 +0000 UTC" firstStartedPulling="2026-02-16 15:06:28.333857191 +0000 UTC m=+7387.660570919" lastFinishedPulling="2026-02-16 15:06:28.829359697 +0000 UTC m=+7388.156073425" observedRunningTime="2026-02-16 15:06:29.344090418 +0000 UTC m=+7388.670804156" watchObservedRunningTime="2026-02-16 15:06:29.355453008 +0000 UTC m=+7388.682166736" Feb 16 15:06:36 crc kubenswrapper[4816]: I0216 15:06:36.942902 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:06:36 crc kubenswrapper[4816]: I0216 15:06:36.943946 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:06:45 crc kubenswrapper[4816]: I0216 15:06:45.506418 4816 generic.go:334] "Generic (PLEG): container finished" podID="2abfbb74-d7a6-46b1-937f-4a9a1f882215" containerID="1cf1f9191b85a46afff560acdefb983b1d86497b614dd79602ffd01c898406e0" exitCode=0 Feb 16 15:06:45 crc kubenswrapper[4816]: I0216 15:06:45.506495 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" event={"ID":"2abfbb74-d7a6-46b1-937f-4a9a1f882215","Type":"ContainerDied","Data":"1cf1f9191b85a46afff560acdefb983b1d86497b614dd79602ffd01c898406e0"} Feb 16 15:06:46 crc kubenswrapper[4816]: I0216 15:06:46.958893 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.084402 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-inventory\") pod \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.084637 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5kqf\" (UniqueName: \"kubernetes.io/projected/2abfbb74-d7a6-46b1-937f-4a9a1f882215-kube-api-access-w5kqf\") pod \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.084702 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ssh-key-openstack-cell1\") pod \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.084883 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ceph\") pod \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\" (UID: \"2abfbb74-d7a6-46b1-937f-4a9a1f882215\") " Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.091239 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ceph" (OuterVolumeSpecName: "ceph") pod "2abfbb74-d7a6-46b1-937f-4a9a1f882215" (UID: "2abfbb74-d7a6-46b1-937f-4a9a1f882215"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.091345 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2abfbb74-d7a6-46b1-937f-4a9a1f882215-kube-api-access-w5kqf" (OuterVolumeSpecName: "kube-api-access-w5kqf") pod "2abfbb74-d7a6-46b1-937f-4a9a1f882215" (UID: "2abfbb74-d7a6-46b1-937f-4a9a1f882215"). InnerVolumeSpecName "kube-api-access-w5kqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.120617 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "2abfbb74-d7a6-46b1-937f-4a9a1f882215" (UID: "2abfbb74-d7a6-46b1-937f-4a9a1f882215"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.121382 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-inventory" (OuterVolumeSpecName: "inventory") pod "2abfbb74-d7a6-46b1-937f-4a9a1f882215" (UID: "2abfbb74-d7a6-46b1-937f-4a9a1f882215"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.187939 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.187981 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.187995 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5kqf\" (UniqueName: \"kubernetes.io/projected/2abfbb74-d7a6-46b1-937f-4a9a1f882215-kube-api-access-w5kqf\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.188011 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/2abfbb74-d7a6-46b1-937f-4a9a1f882215-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.536468 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" event={"ID":"2abfbb74-d7a6-46b1-937f-4a9a1f882215","Type":"ContainerDied","Data":"2fa14b57754ce6cff08f932d5e6c398747a2aa34e5899bff96212cbb24e78424"} Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.536811 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fa14b57754ce6cff08f932d5e6c398747a2aa34e5899bff96212cbb24e78424" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.536987 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-mgsws" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.631225 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-bfqhj"] Feb 16 15:06:47 crc kubenswrapper[4816]: E0216 15:06:47.631771 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2abfbb74-d7a6-46b1-937f-4a9a1f882215" containerName="reboot-os-openstack-openstack-cell1" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.631796 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="2abfbb74-d7a6-46b1-937f-4a9a1f882215" containerName="reboot-os-openstack-openstack-cell1" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.632113 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="2abfbb74-d7a6-46b1-937f-4a9a1f882215" containerName="reboot-os-openstack-openstack-cell1" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.634296 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.637729 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.637834 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.637738 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.638399 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.652750 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-bfqhj"] Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.704734 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.704788 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.704817 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.704870 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.704915 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.704937 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.704971 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.705001 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-inventory\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.705086 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.705119 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4vnk\" (UniqueName: \"kubernetes.io/projected/e7eb1a12-445f-49cf-9958-d9cdccd07352-kube-api-access-m4vnk\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.705144 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ceph\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.705202 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806579 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806629 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806676 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806717 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806739 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806761 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806788 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-inventory\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806846 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806874 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4vnk\" (UniqueName: \"kubernetes.io/projected/e7eb1a12-445f-49cf-9958-d9cdccd07352-kube-api-access-m4vnk\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806904 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ceph\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806950 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.806990 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.812421 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ceph\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.813244 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.813333 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.813965 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.814324 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.814341 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-inventory\") pod 
\"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.815006 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.815894 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.815971 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.817914 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.821875 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.824433 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4vnk\" (UniqueName: \"kubernetes.io/projected/e7eb1a12-445f-49cf-9958-d9cdccd07352-kube-api-access-m4vnk\") pod \"install-certs-openstack-openstack-cell1-bfqhj\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:47 crc kubenswrapper[4816]: I0216 15:06:47.966960 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:06:48 crc kubenswrapper[4816]: I0216 15:06:48.499610 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-bfqhj"] Feb 16 15:06:48 crc kubenswrapper[4816]: I0216 15:06:48.546605 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" event={"ID":"e7eb1a12-445f-49cf-9958-d9cdccd07352","Type":"ContainerStarted","Data":"4195c3274c5ccab6419ae4db9361046a7acf053b3a10cfd5b34addd556f68e8b"} Feb 16 15:06:49 crc kubenswrapper[4816]: I0216 15:06:49.564498 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" event={"ID":"e7eb1a12-445f-49cf-9958-d9cdccd07352","Type":"ContainerStarted","Data":"13842460bcd7191300f46cdb89ab8aa2bd8b9d84b00d0e9f4a82bfdd352e90b7"} Feb 16 15:06:49 crc kubenswrapper[4816]: I0216 15:06:49.589493 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" podStartSLOduration=2.182623284 podStartE2EDuration="2.589468824s" podCreationTimestamp="2026-02-16 15:06:47 +0000 UTC" firstStartedPulling="2026-02-16 15:06:48.501631821 +0000 UTC m=+7407.828345549" lastFinishedPulling="2026-02-16 15:06:48.908477371 +0000 UTC m=+7408.235191089" observedRunningTime="2026-02-16 15:06:49.587556641 +0000 UTC m=+7408.914270389" watchObservedRunningTime="2026-02-16 15:06:49.589468824 +0000 UTC m=+7408.916182582" Feb 16 15:07:06 crc kubenswrapper[4816]: I0216 15:07:06.940912 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:07:06 crc kubenswrapper[4816]: I0216 15:07:06.941437 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:07:08 crc kubenswrapper[4816]: I0216 15:07:08.798504 4816 generic.go:334] "Generic (PLEG): container finished" podID="e7eb1a12-445f-49cf-9958-d9cdccd07352" containerID="13842460bcd7191300f46cdb89ab8aa2bd8b9d84b00d0e9f4a82bfdd352e90b7" exitCode=0 Feb 16 15:07:08 crc kubenswrapper[4816]: I0216 15:07:08.798881 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" event={"ID":"e7eb1a12-445f-49cf-9958-d9cdccd07352","Type":"ContainerDied","Data":"13842460bcd7191300f46cdb89ab8aa2bd8b9d84b00d0e9f4a82bfdd352e90b7"} Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.345202 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.426744 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-metadata-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.427324 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-dhcp-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.427502 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ovn-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.428328 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-nova-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.434230 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.434553 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.434842 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.435862 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.531865 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-libvirt-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.532486 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-sriov-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.532674 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4vnk\" (UniqueName: \"kubernetes.io/projected/e7eb1a12-445f-49cf-9958-d9cdccd07352-kube-api-access-m4vnk\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.532802 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-inventory\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.532973 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ssh-key-openstack-cell1\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.533136 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-bootstrap-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.533275 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-telemetry-combined-ca-bundle\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.533447 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ceph\") pod \"e7eb1a12-445f-49cf-9958-d9cdccd07352\" (UID: \"e7eb1a12-445f-49cf-9958-d9cdccd07352\") " Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.534209 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.534313 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-metadata-combined-ca-bundle\") 
on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.534377 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.534451 4816 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.536147 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.537122 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7eb1a12-445f-49cf-9958-d9cdccd07352-kube-api-access-m4vnk" (OuterVolumeSpecName: "kube-api-access-m4vnk") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "kube-api-access-m4vnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.537798 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.537981 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ceph" (OuterVolumeSpecName: "ceph") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.539346 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.540578 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.568679 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-inventory" (OuterVolumeSpecName: "inventory") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.574903 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "e7eb1a12-445f-49cf-9958-d9cdccd07352" (UID: "e7eb1a12-445f-49cf-9958-d9cdccd07352"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648391 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648441 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4vnk\" (UniqueName: \"kubernetes.io/projected/e7eb1a12-445f-49cf-9958-d9cdccd07352-kube-api-access-m4vnk\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648463 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648482 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648500 4816 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648517 4816 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648535 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.648553 4816 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7eb1a12-445f-49cf-9958-d9cdccd07352-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.834585 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" event={"ID":"e7eb1a12-445f-49cf-9958-d9cdccd07352","Type":"ContainerDied","Data":"4195c3274c5ccab6419ae4db9361046a7acf053b3a10cfd5b34addd556f68e8b"} Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 
15:07:10.834872 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-bfqhj" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.834981 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4195c3274c5ccab6419ae4db9361046a7acf053b3a10cfd5b34addd556f68e8b" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.945990 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-pkfsl"] Feb 16 15:07:10 crc kubenswrapper[4816]: E0216 15:07:10.946694 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7eb1a12-445f-49cf-9958-d9cdccd07352" containerName="install-certs-openstack-openstack-cell1" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.946731 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7eb1a12-445f-49cf-9958-d9cdccd07352" containerName="install-certs-openstack-openstack-cell1" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.947064 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7eb1a12-445f-49cf-9958-d9cdccd07352" containerName="install-certs-openstack-openstack-cell1" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.948236 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.951716 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.951833 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.951989 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.952175 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:07:10 crc kubenswrapper[4816]: I0216 15:07:10.958730 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-pkfsl"] Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.056092 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm2cb\" (UniqueName: \"kubernetes.io/projected/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-kube-api-access-lm2cb\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.056175 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ssh-key-openstack-cell1\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.056473 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ceph\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: 
\"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.056647 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-inventory\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.158819 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm2cb\" (UniqueName: \"kubernetes.io/projected/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-kube-api-access-lm2cb\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.158939 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ssh-key-openstack-cell1\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.159060 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ceph\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.159120 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-inventory\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.163455 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ceph\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.166153 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ssh-key-openstack-cell1\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.167336 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-inventory\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.188560 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lm2cb\" (UniqueName: \"kubernetes.io/projected/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-kube-api-access-lm2cb\") pod \"ceph-client-openstack-openstack-cell1-pkfsl\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:11 crc kubenswrapper[4816]: I0216 15:07:11.281015 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:12 crc kubenswrapper[4816]: I0216 15:07:12.124939 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-pkfsl"] Feb 16 15:07:12 crc kubenswrapper[4816]: I0216 15:07:12.857617 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" event={"ID":"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a","Type":"ContainerStarted","Data":"6571459f6c2ea6ebd4d5ba4a5746cc4020b35b76667b37e332fb3ced1facad49"} Feb 16 15:07:12 crc kubenswrapper[4816]: I0216 15:07:12.857952 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" event={"ID":"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a","Type":"ContainerStarted","Data":"81c40e7ef6dde5cc28320f14f578e739eee4e31d38fa449337795182173c6f24"} Feb 16 15:07:12 crc kubenswrapper[4816]: I0216 15:07:12.875805 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" podStartSLOduration=2.480639287 podStartE2EDuration="2.875757537s" podCreationTimestamp="2026-02-16 15:07:10 +0000 UTC" firstStartedPulling="2026-02-16 15:07:12.136896487 +0000 UTC m=+7431.463610225" lastFinishedPulling="2026-02-16 15:07:12.532014747 +0000 UTC m=+7431.858728475" observedRunningTime="2026-02-16 15:07:12.871286806 +0000 UTC m=+7432.198000524" watchObservedRunningTime="2026-02-16 15:07:12.875757537 +0000 UTC m=+7432.202471275" Feb 16 15:07:17 crc kubenswrapper[4816]: I0216 15:07:17.920399 4816 generic.go:334] "Generic (PLEG): container finished" podID="1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" containerID="6571459f6c2ea6ebd4d5ba4a5746cc4020b35b76667b37e332fb3ced1facad49" exitCode=0 Feb 16 15:07:17 crc kubenswrapper[4816]: I0216 15:07:17.920509 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" event={"ID":"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a","Type":"ContainerDied","Data":"6571459f6c2ea6ebd4d5ba4a5746cc4020b35b76667b37e332fb3ced1facad49"} Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.416211 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.535409 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm2cb\" (UniqueName: \"kubernetes.io/projected/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-kube-api-access-lm2cb\") pod \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.535462 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ceph\") pod \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.535576 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ssh-key-openstack-cell1\") pod \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.535682 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-inventory\") pod \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\" (UID: \"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a\") " Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.541522 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-kube-api-access-lm2cb" (OuterVolumeSpecName: "kube-api-access-lm2cb") pod "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" (UID: "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a"). InnerVolumeSpecName "kube-api-access-lm2cb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.541804 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ceph" (OuterVolumeSpecName: "ceph") pod "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" (UID: "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.566707 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" (UID: "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.569309 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-inventory" (OuterVolumeSpecName: "inventory") pod "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" (UID: "1cdc4b89-7e53-4a42-8d37-afbf8b153f9a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.638794 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.639116 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.639129 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm2cb\" (UniqueName: \"kubernetes.io/projected/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-kube-api-access-lm2cb\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.639139 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cdc4b89-7e53-4a42-8d37-afbf8b153f9a-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.943116 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" event={"ID":"1cdc4b89-7e53-4a42-8d37-afbf8b153f9a","Type":"ContainerDied","Data":"81c40e7ef6dde5cc28320f14f578e739eee4e31d38fa449337795182173c6f24"} Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.943181 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81c40e7ef6dde5cc28320f14f578e739eee4e31d38fa449337795182173c6f24" Feb 16 15:07:19 crc kubenswrapper[4816]: I0216 15:07:19.943256 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-pkfsl" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.038106 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-smvzm"] Feb 16 15:07:20 crc kubenswrapper[4816]: E0216 15:07:20.038671 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" containerName="ceph-client-openstack-openstack-cell1" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.038692 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" containerName="ceph-client-openstack-openstack-cell1" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.039090 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cdc4b89-7e53-4a42-8d37-afbf8b153f9a" containerName="ceph-client-openstack-openstack-cell1" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.040134 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.045198 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.047061 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.047301 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.047529 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.047720 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.056458 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-smvzm"] Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.148628 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ceph\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.148697 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.148740 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.148832 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.148885 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-inventory\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.148903 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffqmt\" (UniqueName: \"kubernetes.io/projected/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-kube-api-access-ffqmt\") pod 
\"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.250580 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.250704 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-inventory\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.250736 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffqmt\" (UniqueName: \"kubernetes.io/projected/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-kube-api-access-ffqmt\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.250826 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ceph\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.251260 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.252280 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.252379 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.255469 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.258400 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.258589 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-inventory\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.258842 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ceph\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.267798 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffqmt\" (UniqueName: \"kubernetes.io/projected/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-kube-api-access-ffqmt\") pod \"ovn-openstack-openstack-cell1-smvzm\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.363530 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.920046 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-smvzm"] Feb 16 15:07:20 crc kubenswrapper[4816]: I0216 15:07:20.953290 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-smvzm" event={"ID":"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36","Type":"ContainerStarted","Data":"0173f07174ce651b29a8d21a5a13c795ded1505b7f0ae20b5fc7847980296acb"} Feb 16 15:07:21 crc kubenswrapper[4816]: I0216 15:07:21.411671 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:07:21 crc kubenswrapper[4816]: I0216 15:07:21.963923 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-smvzm" event={"ID":"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36","Type":"ContainerStarted","Data":"a96dc86412579d810279d3b46af37a13c60fe95249736de6e85dec9ff071d436"} Feb 16 15:07:21 crc kubenswrapper[4816]: I0216 15:07:21.988813 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-smvzm" podStartSLOduration=1.493630412 podStartE2EDuration="1.988773599s" podCreationTimestamp="2026-02-16 15:07:20 +0000 UTC" firstStartedPulling="2026-02-16 15:07:20.912750179 +0000 UTC m=+7440.239463907" lastFinishedPulling="2026-02-16 15:07:21.407893366 +0000 UTC m=+7440.734607094" observedRunningTime="2026-02-16 15:07:21.980260277 +0000 UTC m=+7441.306974015" watchObservedRunningTime="2026-02-16 15:07:21.988773599 +0000 UTC m=+7441.315487327" Feb 16 15:07:36 crc kubenswrapper[4816]: I0216 15:07:36.940993 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 15:07:36 crc kubenswrapper[4816]: I0216 15:07:36.941500 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 15:07:36 crc kubenswrapper[4816]: I0216 15:07:36.941543 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc"
Feb 16 15:07:36 crc kubenswrapper[4816]: I0216 15:07:36.942330 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 16 15:07:36 crc kubenswrapper[4816]: I0216 15:07:36.942406 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" gracePeriod=600
Feb 16 15:07:37 crc kubenswrapper[4816]: E0216 15:07:37.074823 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:07:37 crc kubenswrapper[4816]: I0216 15:07:37.145268 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" exitCode=0
Feb 16 15:07:37 crc kubenswrapper[4816]: I0216 15:07:37.145513 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"}
Feb 16 15:07:37 crc kubenswrapper[4816]: I0216 15:07:37.145555 4816 scope.go:117] "RemoveContainer" containerID="7b7768d35603acc2ec6f8a3a3978f27ffd1b0d30c9fd3424095a1924ca5f68fd"
Feb 16 15:07:37 crc kubenswrapper[4816]: I0216 15:07:37.147085 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:07:37 crc kubenswrapper[4816]: E0216 15:07:37.147440 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
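The failed liveness probe above kills the container, after which every sync attempt hits the same CrashLoopBackOff error until the backoff window expires; that is why identical "RemoveContainer" / "Error syncing pod, skipping" pairs repeat at 15:07:48, 15:07:59, 15:08:12 and so on below. Kubelet's default container restart backoff starts at 10s and doubles per failed restart, capped at 5m, which this container has already reached. A small sketch of that schedule (our own illustration of the policy, not kubelet's code):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initialBackoff = 10 * time.Second // kubelet's default base delay
		maxBackoff     = 5 * time.Minute  // the cap quoted in the errors above
	)
	delay := initialBackoff
	for restart := 1; restart <= 8; restart++ {
		fmt.Printf("restart %d: back-off %v\n", restart, delay)
		if delay *= 2; delay > maxBackoff {
			delay = maxBackoff
		}
	}
	// Prints 10s, 20s, 40s, 1m20s, 2m40s, then 5m0s repeatedly --
	// the steady state this container is stuck in.
}
```

Note also that the first RemoveContainer at 15:07:37.145555 targets the previous dead instance (7b7768d3...), i.e. the cleanup half of the restart cycle, before the backoff blocks the new start.
Feb 16 15:07:48 crc kubenswrapper[4816]: I0216 15:07:48.399150 4816 scope.go:117] "RemoveContainer" 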
containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:07:48 crc kubenswrapper[4816]: E0216 15:07:48.400051 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:07:59 crc kubenswrapper[4816]: I0216 15:07:59.398793 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:07:59 crc kubenswrapper[4816]: E0216 15:07:59.399598 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:08:12 crc kubenswrapper[4816]: I0216 15:08:12.398758 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:08:12 crc kubenswrapper[4816]: E0216 15:08:12.399790 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:08:24 crc kubenswrapper[4816]: I0216 15:08:24.631845 4816 generic.go:334] "Generic (PLEG): container finished" podID="1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" containerID="a96dc86412579d810279d3b46af37a13c60fe95249736de6e85dec9ff071d436" exitCode=0 Feb 16 15:08:24 crc kubenswrapper[4816]: I0216 15:08:24.631950 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-smvzm" event={"ID":"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36","Type":"ContainerDied","Data":"a96dc86412579d810279d3b46af37a13c60fe95249736de6e85dec9ff071d436"} Feb 16 15:08:25 crc kubenswrapper[4816]: I0216 15:08:25.399260 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:08:25 crc kubenswrapper[4816]: E0216 15:08:25.399540 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.097404 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.212497 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ceph\") pod \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.212544 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-inventory\") pod \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.212604 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovncontroller-config-0\") pod \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.212682 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ssh-key-openstack-cell1\") pod \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.212751 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffqmt\" (UniqueName: \"kubernetes.io/projected/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-kube-api-access-ffqmt\") pod \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.212775 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovn-combined-ca-bundle\") pod \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\" (UID: \"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36\") " Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.218496 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" (UID: "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.218625 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ceph" (OuterVolumeSpecName: "ceph") pod "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" (UID: "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.224886 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-kube-api-access-ffqmt" (OuterVolumeSpecName: "kube-api-access-ffqmt") pod "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" (UID: "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36"). InnerVolumeSpecName "kube-api-access-ffqmt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.241564 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" (UID: "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.243581 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" (UID: "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.253861 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-inventory" (OuterVolumeSpecName: "inventory") pod "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" (UID: "1fdfbfb5-72fa-43c9-ab7c-e824328f5a36"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.316799 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffqmt\" (UniqueName: \"kubernetes.io/projected/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-kube-api-access-ffqmt\") on node \"crc\" DevicePath \"\"" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.316850 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.316864 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.316878 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.316891 4816 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.316906 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1fdfbfb5-72fa-43c9-ab7c-e824328f5a36-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.656062 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-smvzm" event={"ID":"1fdfbfb5-72fa-43c9-ab7c-e824328f5a36","Type":"ContainerDied","Data":"0173f07174ce651b29a8d21a5a13c795ded1505b7f0ae20b5fc7847980296acb"} Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.656419 4816 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="0173f07174ce651b29a8d21a5a13c795ded1505b7f0ae20b5fc7847980296acb" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.656095 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-smvzm" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.773976 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-tggb5"] Feb 16 15:08:26 crc kubenswrapper[4816]: E0216 15:08:26.774520 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" containerName="ovn-openstack-openstack-cell1" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.774539 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" containerName="ovn-openstack-openstack-cell1" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.774919 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fdfbfb5-72fa-43c9-ab7c-e824328f5a36" containerName="ovn-openstack-openstack-cell1" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.775766 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.778725 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.779170 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.779313 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.779356 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.779482 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.779549 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.789545 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-tggb5"] Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.829487 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.829541 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.829616 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.829716 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2h2c\" (UniqueName: \"kubernetes.io/projected/488534f7-df72-4134-8aef-f7812bb0a497-kube-api-access-v2h2c\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.829749 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.829847 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.829916 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.932285 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.932453 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.932549 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " 
pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.932602 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.932741 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.932850 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2h2c\" (UniqueName: \"kubernetes.io/projected/488534f7-df72-4134-8aef-f7812bb0a497-kube-api-access-v2h2c\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.932944 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.936602 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.936811 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.937404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.938143 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " 
pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.940401 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.940401 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:26 crc kubenswrapper[4816]: I0216 15:08:26.958231 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2h2c\" (UniqueName: \"kubernetes.io/projected/488534f7-df72-4134-8aef-f7812bb0a497-kube-api-access-v2h2c\") pod \"neutron-metadata-openstack-openstack-cell1-tggb5\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:27 crc kubenswrapper[4816]: I0216 15:08:27.112691 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:08:27 crc kubenswrapper[4816]: I0216 15:08:27.535209 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-tggb5"] Feb 16 15:08:27 crc kubenswrapper[4816]: I0216 15:08:27.665912 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" event={"ID":"488534f7-df72-4134-8aef-f7812bb0a497","Type":"ContainerStarted","Data":"8e24f870703dadbe5bf2e28e37552356d27bd7a4d71b5490fea58c5b93242d43"} Feb 16 15:08:28 crc kubenswrapper[4816]: I0216 15:08:28.675231 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" event={"ID":"488534f7-df72-4134-8aef-f7812bb0a497","Type":"ContainerStarted","Data":"627668fa5e6265c48f7573e977134aa38eee74bbb091f53c85baadc96fadadfd"} Feb 16 15:08:28 crc kubenswrapper[4816]: I0216 15:08:28.712987 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" podStartSLOduration=2.2397076399999998 podStartE2EDuration="2.71296106s" podCreationTimestamp="2026-02-16 15:08:26 +0000 UTC" firstStartedPulling="2026-02-16 15:08:27.530991912 +0000 UTC m=+7506.857705640" lastFinishedPulling="2026-02-16 15:08:28.004245332 +0000 UTC m=+7507.330959060" observedRunningTime="2026-02-16 15:08:28.703571923 +0000 UTC m=+7508.030285661" watchObservedRunningTime="2026-02-16 15:08:28.71296106 +0000 UTC m=+7508.039674788" Feb 16 15:08:38 crc kubenswrapper[4816]: I0216 15:08:38.398899 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:08:38 crc kubenswrapper[4816]: E0216 15:08:38.399832 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:08:50 crc kubenswrapper[4816]: I0216 15:08:50.398797 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:08:50 crc kubenswrapper[4816]: E0216 15:08:50.399881 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.063114 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5kvrr"] Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.066150 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.083699 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5kvrr"] Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.165028 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdgfj\" (UniqueName: \"kubernetes.io/projected/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-kube-api-access-rdgfj\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.165091 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-utilities\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.165113 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-catalog-content\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.266860 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdgfj\" (UniqueName: \"kubernetes.io/projected/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-kube-api-access-rdgfj\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.267183 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-utilities\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.267304 4816 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-catalog-content\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.267691 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-utilities\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.267744 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-catalog-content\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.286000 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdgfj\" (UniqueName: \"kubernetes.io/projected/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-kube-api-access-rdgfj\") pod \"redhat-operators-5kvrr\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.427396 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.960721 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5kvrr"] Feb 16 15:08:57 crc kubenswrapper[4816]: I0216 15:08:57.968416 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5kvrr" event={"ID":"0dffeb91-4db9-49ec-b8f7-c3f301824a2e","Type":"ContainerStarted","Data":"f3b1a461b1a86bdc65e3174bd82b7f782792919ad01b6cd3eecab36aeb3c2d30"} Feb 16 15:08:58 crc kubenswrapper[4816]: I0216 15:08:58.980045 4816 generic.go:334] "Generic (PLEG): container finished" podID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerID="e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f" exitCode=0 Feb 16 15:08:58 crc kubenswrapper[4816]: I0216 15:08:58.980142 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5kvrr" event={"ID":"0dffeb91-4db9-49ec-b8f7-c3f301824a2e","Type":"ContainerDied","Data":"e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f"} Feb 16 15:08:59 crc kubenswrapper[4816]: I0216 15:08:59.993329 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5kvrr" event={"ID":"0dffeb91-4db9-49ec-b8f7-c3f301824a2e","Type":"ContainerStarted","Data":"b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b"} Feb 16 15:09:04 crc kubenswrapper[4816]: I0216 15:09:04.066674 4816 generic.go:334] "Generic (PLEG): container finished" podID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerID="b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b" exitCode=0 Feb 16 15:09:04 crc kubenswrapper[4816]: I0216 15:09:04.066703 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5kvrr" 
event={"ID":"0dffeb91-4db9-49ec-b8f7-c3f301824a2e","Type":"ContainerDied","Data":"b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b"} Feb 16 15:09:04 crc kubenswrapper[4816]: I0216 15:09:04.398626 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:09:04 crc kubenswrapper[4816]: E0216 15:09:04.399161 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:09:05 crc kubenswrapper[4816]: I0216 15:09:05.148073 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5kvrr" event={"ID":"0dffeb91-4db9-49ec-b8f7-c3f301824a2e","Type":"ContainerStarted","Data":"ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826"} Feb 16 15:09:05 crc kubenswrapper[4816]: I0216 15:09:05.179052 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5kvrr" podStartSLOduration=2.450588951 podStartE2EDuration="8.179033877s" podCreationTimestamp="2026-02-16 15:08:57 +0000 UTC" firstStartedPulling="2026-02-16 15:08:58.982698717 +0000 UTC m=+7538.309412455" lastFinishedPulling="2026-02-16 15:09:04.711143633 +0000 UTC m=+7544.037857381" observedRunningTime="2026-02-16 15:09:05.168221402 +0000 UTC m=+7544.494935130" watchObservedRunningTime="2026-02-16 15:09:05.179033877 +0000 UTC m=+7544.505747605" Feb 16 15:09:07 crc kubenswrapper[4816]: I0216 15:09:07.427719 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:09:07 crc kubenswrapper[4816]: I0216 15:09:07.428192 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:09:08 crc kubenswrapper[4816]: I0216 15:09:08.476057 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5kvrr" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="registry-server" probeResult="failure" output=< Feb 16 15:09:08 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:09:08 crc kubenswrapper[4816]: > Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.276827 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qbxqm"] Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.279383 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.294687 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qbxqm"] Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.463196 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-utilities\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.463329 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv6gz\" (UniqueName: \"kubernetes.io/projected/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-kube-api-access-nv6gz\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.463367 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-catalog-content\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.565458 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv6gz\" (UniqueName: \"kubernetes.io/projected/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-kube-api-access-nv6gz\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.565530 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-catalog-content\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.565697 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-utilities\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.566153 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-catalog-content\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.566278 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-utilities\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.590723 4816 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nv6gz\" (UniqueName: \"kubernetes.io/projected/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-kube-api-access-nv6gz\") pod \"certified-operators-qbxqm\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:12 crc kubenswrapper[4816]: I0216 15:09:12.601598 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:13 crc kubenswrapper[4816]: I0216 15:09:13.088603 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qbxqm"] Feb 16 15:09:13 crc kubenswrapper[4816]: I0216 15:09:13.638917 4816 generic.go:334] "Generic (PLEG): container finished" podID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerID="2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5" exitCode=0 Feb 16 15:09:13 crc kubenswrapper[4816]: I0216 15:09:13.639024 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbxqm" event={"ID":"7eef047f-0c37-49ab-8ecc-bbd4173e8e43","Type":"ContainerDied","Data":"2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5"} Feb 16 15:09:13 crc kubenswrapper[4816]: I0216 15:09:13.639262 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbxqm" event={"ID":"7eef047f-0c37-49ab-8ecc-bbd4173e8e43","Type":"ContainerStarted","Data":"92c9b2551bb10d601e1c1bce0de8b3773c899dae52ab13627aceed826621d2a8"} Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.629977 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4gjjf"] Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.633343 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.643285 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4gjjf"] Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.651456 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbxqm" event={"ID":"7eef047f-0c37-49ab-8ecc-bbd4173e8e43","Type":"ContainerStarted","Data":"b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794"} Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.733481 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9hcz\" (UniqueName: \"kubernetes.io/projected/29e62fcf-7477-48b5-add2-f9ca068f2dca-kube-api-access-g9hcz\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.733580 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-utilities\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.733752 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-catalog-content\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.835529 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-utilities\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.835717 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-catalog-content\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.835844 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9hcz\" (UniqueName: \"kubernetes.io/projected/29e62fcf-7477-48b5-add2-f9ca068f2dca-kube-api-access-g9hcz\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.836107 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-utilities\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.836130 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-catalog-content\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.858730 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9hcz\" (UniqueName: \"kubernetes.io/projected/29e62fcf-7477-48b5-add2-f9ca068f2dca-kube-api-access-g9hcz\") pod \"redhat-marketplace-4gjjf\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:14 crc kubenswrapper[4816]: I0216 15:09:14.987756 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:15 crc kubenswrapper[4816]: I0216 15:09:15.546585 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4gjjf"] Feb 16 15:09:15 crc kubenswrapper[4816]: I0216 15:09:15.671361 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4gjjf" event={"ID":"29e62fcf-7477-48b5-add2-f9ca068f2dca","Type":"ContainerStarted","Data":"97087228601c43199508e676b434c88b5df431e67dcb93b761e2b7582162ed5d"} Feb 16 15:09:16 crc kubenswrapper[4816]: I0216 15:09:16.685135 4816 generic.go:334] "Generic (PLEG): container finished" podID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerID="6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b" exitCode=0 Feb 16 15:09:16 crc kubenswrapper[4816]: I0216 15:09:16.685256 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4gjjf" event={"ID":"29e62fcf-7477-48b5-add2-f9ca068f2dca","Type":"ContainerDied","Data":"6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b"} Feb 16 15:09:16 crc kubenswrapper[4816]: I0216 15:09:16.716816 4816 generic.go:334] "Generic (PLEG): container finished" podID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerID="b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794" exitCode=0 Feb 16 15:09:16 crc kubenswrapper[4816]: I0216 15:09:16.716864 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbxqm" event={"ID":"7eef047f-0c37-49ab-8ecc-bbd4173e8e43","Type":"ContainerDied","Data":"b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794"} Feb 16 15:09:17 crc kubenswrapper[4816]: I0216 15:09:17.740432 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4gjjf" event={"ID":"29e62fcf-7477-48b5-add2-f9ca068f2dca","Type":"ContainerStarted","Data":"46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a"} Feb 16 15:09:17 crc kubenswrapper[4816]: I0216 15:09:17.747225 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbxqm" event={"ID":"7eef047f-0c37-49ab-8ecc-bbd4173e8e43","Type":"ContainerStarted","Data":"157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33"} Feb 16 15:09:17 crc kubenswrapper[4816]: I0216 15:09:17.777854 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qbxqm" podStartSLOduration=2.000080949 podStartE2EDuration="5.777830483s" podCreationTimestamp="2026-02-16 15:09:12 +0000 UTC" firstStartedPulling="2026-02-16 15:09:13.640868348 +0000 UTC m=+7552.967582076" lastFinishedPulling="2026-02-16 15:09:17.418617882 +0000 UTC 
m=+7556.745331610" observedRunningTime="2026-02-16 15:09:17.771437559 +0000 UTC m=+7557.098151317" watchObservedRunningTime="2026-02-16 15:09:17.777830483 +0000 UTC m=+7557.104544211" Feb 16 15:09:18 crc kubenswrapper[4816]: I0216 15:09:18.398254 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:09:18 crc kubenswrapper[4816]: E0216 15:09:18.398807 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:09:18 crc kubenswrapper[4816]: I0216 15:09:18.497915 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5kvrr" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="registry-server" probeResult="failure" output=< Feb 16 15:09:18 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:09:18 crc kubenswrapper[4816]: > Feb 16 15:09:18 crc kubenswrapper[4816]: I0216 15:09:18.759889 4816 generic.go:334] "Generic (PLEG): container finished" podID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerID="46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a" exitCode=0 Feb 16 15:09:18 crc kubenswrapper[4816]: I0216 15:09:18.759946 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4gjjf" event={"ID":"29e62fcf-7477-48b5-add2-f9ca068f2dca","Type":"ContainerDied","Data":"46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a"} Feb 16 15:09:20 crc kubenswrapper[4816]: I0216 15:09:20.788071 4816 generic.go:334] "Generic (PLEG): container finished" podID="488534f7-df72-4134-8aef-f7812bb0a497" containerID="627668fa5e6265c48f7573e977134aa38eee74bbb091f53c85baadc96fadadfd" exitCode=0 Feb 16 15:09:20 crc kubenswrapper[4816]: I0216 15:09:20.788134 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" event={"ID":"488534f7-df72-4134-8aef-f7812bb0a497","Type":"ContainerDied","Data":"627668fa5e6265c48f7573e977134aa38eee74bbb091f53c85baadc96fadadfd"} Feb 16 15:09:20 crc kubenswrapper[4816]: I0216 15:09:20.794836 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4gjjf" event={"ID":"29e62fcf-7477-48b5-add2-f9ca068f2dca","Type":"ContainerStarted","Data":"5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247"} Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.384837 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.413505 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4gjjf" podStartSLOduration=4.898699576 podStartE2EDuration="8.413487462s" podCreationTimestamp="2026-02-16 15:09:14 +0000 UTC" firstStartedPulling="2026-02-16 15:09:16.687606026 +0000 UTC m=+7556.014319754" lastFinishedPulling="2026-02-16 15:09:20.202393902 +0000 UTC m=+7559.529107640" observedRunningTime="2026-02-16 15:09:20.846678253 +0000 UTC m=+7560.173391991" watchObservedRunningTime="2026-02-16 15:09:22.413487462 +0000 UTC m=+7561.740201190" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.453769 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ssh-key-openstack-cell1\") pod \"488534f7-df72-4134-8aef-f7812bb0a497\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.453838 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-ovn-metadata-agent-neutron-config-0\") pod \"488534f7-df72-4134-8aef-f7812bb0a497\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.453918 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-inventory\") pod \"488534f7-df72-4134-8aef-f7812bb0a497\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.453939 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-nova-metadata-neutron-config-0\") pod \"488534f7-df72-4134-8aef-f7812bb0a497\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.454102 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2h2c\" (UniqueName: \"kubernetes.io/projected/488534f7-df72-4134-8aef-f7812bb0a497-kube-api-access-v2h2c\") pod \"488534f7-df72-4134-8aef-f7812bb0a497\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.454166 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-metadata-combined-ca-bundle\") pod \"488534f7-df72-4134-8aef-f7812bb0a497\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.454253 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ceph\") pod \"488534f7-df72-4134-8aef-f7812bb0a497\" (UID: \"488534f7-df72-4134-8aef-f7812bb0a497\") " Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.458924 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/488534f7-df72-4134-8aef-f7812bb0a497-kube-api-access-v2h2c" 
(OuterVolumeSpecName: "kube-api-access-v2h2c") pod "488534f7-df72-4134-8aef-f7812bb0a497" (UID: "488534f7-df72-4134-8aef-f7812bb0a497"). InnerVolumeSpecName "kube-api-access-v2h2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.458958 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "488534f7-df72-4134-8aef-f7812bb0a497" (UID: "488534f7-df72-4134-8aef-f7812bb0a497"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.460499 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ceph" (OuterVolumeSpecName: "ceph") pod "488534f7-df72-4134-8aef-f7812bb0a497" (UID: "488534f7-df72-4134-8aef-f7812bb0a497"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.488612 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "488534f7-df72-4134-8aef-f7812bb0a497" (UID: "488534f7-df72-4134-8aef-f7812bb0a497"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.489312 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-inventory" (OuterVolumeSpecName: "inventory") pod "488534f7-df72-4134-8aef-f7812bb0a497" (UID: "488534f7-df72-4134-8aef-f7812bb0a497"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.489333 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "488534f7-df72-4134-8aef-f7812bb0a497" (UID: "488534f7-df72-4134-8aef-f7812bb0a497"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.491677 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "488534f7-df72-4134-8aef-f7812bb0a497" (UID: "488534f7-df72-4134-8aef-f7812bb0a497"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.556635 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2h2c\" (UniqueName: \"kubernetes.io/projected/488534f7-df72-4134-8aef-f7812bb0a497-kube-api-access-v2h2c\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.556690 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.556705 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.556720 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.556734 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.556748 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.556759 4816 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/488534f7-df72-4134-8aef-f7812bb0a497-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.602228 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.602302 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.872811 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" event={"ID":"488534f7-df72-4134-8aef-f7812bb0a497","Type":"ContainerDied","Data":"8e24f870703dadbe5bf2e28e37552356d27bd7a4d71b5490fea58c5b93242d43"} Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.873188 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e24f870703dadbe5bf2e28e37552356d27bd7a4d71b5490fea58c5b93242d43" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.873249 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-tggb5" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.972553 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-mjw59"] Feb 16 15:09:22 crc kubenswrapper[4816]: E0216 15:09:22.973097 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488534f7-df72-4134-8aef-f7812bb0a497" containerName="neutron-metadata-openstack-openstack-cell1" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.973114 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="488534f7-df72-4134-8aef-f7812bb0a497" containerName="neutron-metadata-openstack-openstack-cell1" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.973464 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="488534f7-df72-4134-8aef-f7812bb0a497" containerName="neutron-metadata-openstack-openstack-cell1" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.974238 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.976570 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.976903 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.977012 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.977044 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.979617 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Feb 16 15:09:22 crc kubenswrapper[4816]: I0216 15:09:22.986859 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-mjw59"] Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.068765 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ceph\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.068884 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkgdr\" (UniqueName: \"kubernetes.io/projected/d16b3ac3-df67-4ab9-9585-e121d8aababc-kube-api-access-fkgdr\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.068919 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.069001 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-inventory\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.069061 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.069179 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.170552 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.170633 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ceph\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.170712 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkgdr\" (UniqueName: \"kubernetes.io/projected/d16b3ac3-df67-4ab9-9585-e121d8aababc-kube-api-access-fkgdr\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.170738 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.170789 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-inventory\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.170831 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: 
\"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.175749 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-inventory\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.176178 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ceph\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.176788 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.188172 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.188579 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.190156 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkgdr\" (UniqueName: \"kubernetes.io/projected/d16b3ac3-df67-4ab9-9585-e121d8aababc-kube-api-access-fkgdr\") pod \"libvirt-openstack-openstack-cell1-mjw59\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") " pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.300317 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-mjw59" Feb 16 15:09:23 crc kubenswrapper[4816]: I0216 15:09:23.668330 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-qbxqm" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="registry-server" probeResult="failure" output=< Feb 16 15:09:23 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:09:23 crc kubenswrapper[4816]: > Feb 16 15:09:24 crc kubenswrapper[4816]: I0216 15:09:24.085897 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-mjw59"] Feb 16 15:09:25 crc kubenswrapper[4816]: I0216 15:09:25.375318 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:25 crc kubenswrapper[4816]: I0216 15:09:25.376090 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:25 crc kubenswrapper[4816]: I0216 15:09:25.446039 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-mjw59" event={"ID":"d16b3ac3-df67-4ab9-9585-e121d8aababc","Type":"ContainerStarted","Data":"b63a20c8818889d09220fc6d87cd2b8791fb5bcfd57bec5b791aaa0a4d884301"} Feb 16 15:09:25 crc kubenswrapper[4816]: I0216 15:09:25.446089 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-mjw59" event={"ID":"d16b3ac3-df67-4ab9-9585-e121d8aababc","Type":"ContainerStarted","Data":"e62c39dbf98c49f693894b8bca73468edc31b7f68ef05d2f94ad47c0ea4828b6"} Feb 16 15:09:25 crc kubenswrapper[4816]: I0216 15:09:25.492394 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:25 crc kubenswrapper[4816]: I0216 15:09:25.514098 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-mjw59" podStartSLOduration=3.119854872 podStartE2EDuration="3.514080877s" podCreationTimestamp="2026-02-16 15:09:22 +0000 UTC" firstStartedPulling="2026-02-16 15:09:24.102563872 +0000 UTC m=+7563.429277600" lastFinishedPulling="2026-02-16 15:09:24.496789877 +0000 UTC m=+7563.823503605" observedRunningTime="2026-02-16 15:09:25.457083153 +0000 UTC m=+7564.783796881" watchObservedRunningTime="2026-02-16 15:09:25.514080877 +0000 UTC m=+7564.840794605" Feb 16 15:09:26 crc kubenswrapper[4816]: I0216 15:09:26.900154 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:26 crc kubenswrapper[4816]: I0216 15:09:26.959824 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4gjjf"] Feb 16 15:09:28 crc kubenswrapper[4816]: I0216 15:09:28.472070 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5kvrr" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="registry-server" probeResult="failure" output=< Feb 16 15:09:28 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:09:28 crc kubenswrapper[4816]: > Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.035631 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4gjjf" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" 
containerName="registry-server" containerID="cri-o://5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247" gracePeriod=2 Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.677348 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.835386 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9hcz\" (UniqueName: \"kubernetes.io/projected/29e62fcf-7477-48b5-add2-f9ca068f2dca-kube-api-access-g9hcz\") pod \"29e62fcf-7477-48b5-add2-f9ca068f2dca\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.835635 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-catalog-content\") pod \"29e62fcf-7477-48b5-add2-f9ca068f2dca\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.835696 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-utilities\") pod \"29e62fcf-7477-48b5-add2-f9ca068f2dca\" (UID: \"29e62fcf-7477-48b5-add2-f9ca068f2dca\") " Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.836378 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-utilities" (OuterVolumeSpecName: "utilities") pod "29e62fcf-7477-48b5-add2-f9ca068f2dca" (UID: "29e62fcf-7477-48b5-add2-f9ca068f2dca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.841894 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29e62fcf-7477-48b5-add2-f9ca068f2dca-kube-api-access-g9hcz" (OuterVolumeSpecName: "kube-api-access-g9hcz") pod "29e62fcf-7477-48b5-add2-f9ca068f2dca" (UID: "29e62fcf-7477-48b5-add2-f9ca068f2dca"). InnerVolumeSpecName "kube-api-access-g9hcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.854305 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29e62fcf-7477-48b5-add2-f9ca068f2dca" (UID: "29e62fcf-7477-48b5-add2-f9ca068f2dca"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.937689 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.937728 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e62fcf-7477-48b5-add2-f9ca068f2dca-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:29 crc kubenswrapper[4816]: I0216 15:09:29.937739 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9hcz\" (UniqueName: \"kubernetes.io/projected/29e62fcf-7477-48b5-add2-f9ca068f2dca-kube-api-access-g9hcz\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.046676 4816 generic.go:334] "Generic (PLEG): container finished" podID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerID="5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247" exitCode=0 Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.046688 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4gjjf" event={"ID":"29e62fcf-7477-48b5-add2-f9ca068f2dca","Type":"ContainerDied","Data":"5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247"} Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.046748 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4gjjf" event={"ID":"29e62fcf-7477-48b5-add2-f9ca068f2dca","Type":"ContainerDied","Data":"97087228601c43199508e676b434c88b5df431e67dcb93b761e2b7582162ed5d"} Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.046775 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4gjjf" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.046792 4816 scope.go:117] "RemoveContainer" containerID="5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.069413 4816 scope.go:117] "RemoveContainer" containerID="46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.096170 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4gjjf"] Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.108374 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4gjjf"] Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.116736 4816 scope.go:117] "RemoveContainer" containerID="6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.143697 4816 scope.go:117] "RemoveContainer" containerID="5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247" Feb 16 15:09:30 crc kubenswrapper[4816]: E0216 15:09:30.144425 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247\": container with ID starting with 5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247 not found: ID does not exist" containerID="5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.144469 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247"} err="failed to get container status \"5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247\": rpc error: code = NotFound desc = could not find container \"5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247\": container with ID starting with 5ad3d4149aee2b9dc864bf4b2baee4fd7c9d35dce4cb7dd0502f25069cc9e247 not found: ID does not exist" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.144494 4816 scope.go:117] "RemoveContainer" containerID="46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a" Feb 16 15:09:30 crc kubenswrapper[4816]: E0216 15:09:30.145140 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a\": container with ID starting with 46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a not found: ID does not exist" containerID="46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.145165 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a"} err="failed to get container status \"46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a\": rpc error: code = NotFound desc = could not find container \"46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a\": container with ID starting with 46762eb3745c04b513dd9debd370a5d994f3e6504d306743b21ce6d11571926a not found: ID does not exist" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.145184 4816 scope.go:117] "RemoveContainer" 
containerID="6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b" Feb 16 15:09:30 crc kubenswrapper[4816]: E0216 15:09:30.145528 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b\": container with ID starting with 6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b not found: ID does not exist" containerID="6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b" Feb 16 15:09:30 crc kubenswrapper[4816]: I0216 15:09:30.145554 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b"} err="failed to get container status \"6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b\": rpc error: code = NotFound desc = could not find container \"6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b\": container with ID starting with 6065c975a5d4c7d932440a72639cec42e1702cedbbb2b8c064e0a66b857c163b not found: ID does not exist" Feb 16 15:09:31 crc kubenswrapper[4816]: I0216 15:09:31.413422 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" path="/var/lib/kubelet/pods/29e62fcf-7477-48b5-add2-f9ca068f2dca/volumes" Feb 16 15:09:32 crc kubenswrapper[4816]: I0216 15:09:32.398523 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:09:32 crc kubenswrapper[4816]: E0216 15:09:32.399326 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:09:32 crc kubenswrapper[4816]: I0216 15:09:32.663146 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:32 crc kubenswrapper[4816]: I0216 15:09:32.733079 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:33 crc kubenswrapper[4816]: I0216 15:09:33.538970 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qbxqm"] Feb 16 15:09:34 crc kubenswrapper[4816]: I0216 15:09:34.089367 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qbxqm" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="registry-server" containerID="cri-o://157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33" gracePeriod=2 Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:34.825326 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:34.955445 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-catalog-content\") pod \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:34.955546 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv6gz\" (UniqueName: \"kubernetes.io/projected/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-kube-api-access-nv6gz\") pod \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:34.955592 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-utilities\") pod \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\" (UID: \"7eef047f-0c37-49ab-8ecc-bbd4173e8e43\") " Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:34.956580 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-utilities" (OuterVolumeSpecName: "utilities") pod "7eef047f-0c37-49ab-8ecc-bbd4173e8e43" (UID: "7eef047f-0c37-49ab-8ecc-bbd4173e8e43"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:34.997068 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-kube-api-access-nv6gz" (OuterVolumeSpecName: "kube-api-access-nv6gz") pod "7eef047f-0c37-49ab-8ecc-bbd4173e8e43" (UID: "7eef047f-0c37-49ab-8ecc-bbd4173e8e43"). InnerVolumeSpecName "kube-api-access-nv6gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.062621 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nv6gz\" (UniqueName: \"kubernetes.io/projected/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-kube-api-access-nv6gz\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.062667 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.130724 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7eef047f-0c37-49ab-8ecc-bbd4173e8e43" (UID: "7eef047f-0c37-49ab-8ecc-bbd4173e8e43"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.145973 4816 generic.go:334] "Generic (PLEG): container finished" podID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerID="157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33" exitCode=0 Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.146031 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbxqm" event={"ID":"7eef047f-0c37-49ab-8ecc-bbd4173e8e43","Type":"ContainerDied","Data":"157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33"} Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.146066 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbxqm" event={"ID":"7eef047f-0c37-49ab-8ecc-bbd4173e8e43","Type":"ContainerDied","Data":"92c9b2551bb10d601e1c1bce0de8b3773c899dae52ab13627aceed826621d2a8"} Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.146089 4816 scope.go:117] "RemoveContainer" containerID="157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.146135 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qbxqm" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.164424 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eef047f-0c37-49ab-8ecc-bbd4173e8e43-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.202833 4816 scope.go:117] "RemoveContainer" containerID="b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.226300 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qbxqm"] Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.244285 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qbxqm"] Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.285869 4816 scope.go:117] "RemoveContainer" containerID="2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.335471 4816 scope.go:117] "RemoveContainer" containerID="157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33" Feb 16 15:09:35 crc kubenswrapper[4816]: E0216 15:09:35.337806 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33\": container with ID starting with 157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33 not found: ID does not exist" containerID="157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.337844 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33"} err="failed to get container status \"157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33\": rpc error: code = NotFound desc = could not find container \"157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33\": container with ID starting with 157f528001cda12f413c79806065a7da4afd89a3bc36cdd0965f3ce8431f6f33 not found: ID does not exist" Feb 16 
15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.337867 4816 scope.go:117] "RemoveContainer" containerID="b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794" Feb 16 15:09:35 crc kubenswrapper[4816]: E0216 15:09:35.350218 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794\": container with ID starting with b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794 not found: ID does not exist" containerID="b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.350256 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794"} err="failed to get container status \"b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794\": rpc error: code = NotFound desc = could not find container \"b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794\": container with ID starting with b69e4531397d166aacead35337a3eb760651d32a3b9437109962de8cabf17794 not found: ID does not exist" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.350303 4816 scope.go:117] "RemoveContainer" containerID="2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5" Feb 16 15:09:35 crc kubenswrapper[4816]: E0216 15:09:35.351451 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5\": container with ID starting with 2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5 not found: ID does not exist" containerID="2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.351480 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5"} err="failed to get container status \"2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5\": rpc error: code = NotFound desc = could not find container \"2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5\": container with ID starting with 2810afd2b68b7a526891ccbf438db5dcf93598761c40ca28cab5be658eb22eb5 not found: ID does not exist" Feb 16 15:09:35 crc kubenswrapper[4816]: I0216 15:09:35.417752 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" path="/var/lib/kubelet/pods/7eef047f-0c37-49ab-8ecc-bbd4173e8e43/volumes" Feb 16 15:09:37 crc kubenswrapper[4816]: I0216 15:09:37.480754 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:09:37 crc kubenswrapper[4816]: I0216 15:09:37.549936 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:09:37 crc kubenswrapper[4816]: I0216 15:09:37.948053 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5kvrr"] Feb 16 15:09:39 crc kubenswrapper[4816]: I0216 15:09:39.205560 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5kvrr" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="registry-server" 
containerID="cri-o://ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826" gracePeriod=2 Feb 16 15:09:39 crc kubenswrapper[4816]: I0216 15:09:39.905030 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.091563 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdgfj\" (UniqueName: \"kubernetes.io/projected/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-kube-api-access-rdgfj\") pod \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.091781 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-catalog-content\") pod \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.091824 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-utilities\") pod \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\" (UID: \"0dffeb91-4db9-49ec-b8f7-c3f301824a2e\") " Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.092541 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-utilities" (OuterVolumeSpecName: "utilities") pod "0dffeb91-4db9-49ec-b8f7-c3f301824a2e" (UID: "0dffeb91-4db9-49ec-b8f7-c3f301824a2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.100479 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-kube-api-access-rdgfj" (OuterVolumeSpecName: "kube-api-access-rdgfj") pod "0dffeb91-4db9-49ec-b8f7-c3f301824a2e" (UID: "0dffeb91-4db9-49ec-b8f7-c3f301824a2e"). InnerVolumeSpecName "kube-api-access-rdgfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.194851 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdgfj\" (UniqueName: \"kubernetes.io/projected/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-kube-api-access-rdgfj\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.194894 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.216237 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0dffeb91-4db9-49ec-b8f7-c3f301824a2e" (UID: "0dffeb91-4db9-49ec-b8f7-c3f301824a2e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.229080 4816 generic.go:334] "Generic (PLEG): container finished" podID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerID="ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826" exitCode=0 Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.229119 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5kvrr" event={"ID":"0dffeb91-4db9-49ec-b8f7-c3f301824a2e","Type":"ContainerDied","Data":"ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826"} Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.229146 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5kvrr" event={"ID":"0dffeb91-4db9-49ec-b8f7-c3f301824a2e","Type":"ContainerDied","Data":"f3b1a461b1a86bdc65e3174bd82b7f782792919ad01b6cd3eecab36aeb3c2d30"} Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.229168 4816 scope.go:117] "RemoveContainer" containerID="ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.229297 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5kvrr" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.266902 4816 scope.go:117] "RemoveContainer" containerID="b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.267488 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5kvrr"] Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.279631 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5kvrr"] Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.303252 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dffeb91-4db9-49ec-b8f7-c3f301824a2e-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.308841 4816 scope.go:117] "RemoveContainer" containerID="e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.359194 4816 scope.go:117] "RemoveContainer" containerID="ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826" Feb 16 15:09:40 crc kubenswrapper[4816]: E0216 15:09:40.359729 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826\": container with ID starting with ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826 not found: ID does not exist" containerID="ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.359801 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826"} err="failed to get container status \"ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826\": rpc error: code = NotFound desc = could not find container \"ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826\": container with ID starting with ecb245f516a03eba51d3482cbe1b81d1187b3f0bef8d68f8efcb11d3cb84c826 not found: ID does not exist" Feb 16 15:09:40 crc 
kubenswrapper[4816]: I0216 15:09:40.359855 4816 scope.go:117] "RemoveContainer" containerID="b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b" Feb 16 15:09:40 crc kubenswrapper[4816]: E0216 15:09:40.360325 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b\": container with ID starting with b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b not found: ID does not exist" containerID="b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.360364 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b"} err="failed to get container status \"b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b\": rpc error: code = NotFound desc = could not find container \"b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b\": container with ID starting with b4d21857fc0d339f3196a5587904d86c3ef451efa2f924f9997fba1b7053628b not found: ID does not exist" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.360393 4816 scope.go:117] "RemoveContainer" containerID="e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f" Feb 16 15:09:40 crc kubenswrapper[4816]: E0216 15:09:40.360756 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f\": container with ID starting with e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f not found: ID does not exist" containerID="e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f" Feb 16 15:09:40 crc kubenswrapper[4816]: I0216 15:09:40.360808 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f"} err="failed to get container status \"e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f\": rpc error: code = NotFound desc = could not find container \"e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f\": container with ID starting with e85460c4b60062b944577961959a8c177d7de240a4518e68c4e7bf0ab9d4df3f not found: ID does not exist" Feb 16 15:09:41 crc kubenswrapper[4816]: I0216 15:09:41.417049 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" path="/var/lib/kubelet/pods/0dffeb91-4db9-49ec-b8f7-c3f301824a2e/volumes" Feb 16 15:09:44 crc kubenswrapper[4816]: I0216 15:09:44.399962 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:09:44 crc kubenswrapper[4816]: E0216 15:09:44.401365 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:09:57 crc kubenswrapper[4816]: I0216 15:09:57.398682 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" 
Feb 16 15:09:57 crc kubenswrapper[4816]: E0216 15:09:57.399713 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:10:11 crc kubenswrapper[4816]: I0216 15:10:11.412368 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:10:11 crc kubenswrapper[4816]: E0216 15:10:11.413344 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:10:26 crc kubenswrapper[4816]: I0216 15:10:26.399139 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:10:26 crc kubenswrapper[4816]: E0216 15:10:26.399943 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:10:38 crc kubenswrapper[4816]: I0216 15:10:38.400256 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:10:38 crc kubenswrapper[4816]: E0216 15:10:38.401628 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:10:52 crc kubenswrapper[4816]: I0216 15:10:52.399087 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:10:52 crc kubenswrapper[4816]: E0216 15:10:52.400076 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:11:05 crc kubenswrapper[4816]: I0216 15:11:05.400581 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:11:05 crc kubenswrapper[4816]: E0216 15:11:05.402777 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:11:20 crc kubenswrapper[4816]: I0216 15:11:20.413725 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:11:20 crc kubenswrapper[4816]: E0216 15:11:20.414673 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:11:35 crc kubenswrapper[4816]: I0216 15:11:35.400897 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:11:35 crc kubenswrapper[4816]: E0216 15:11:35.401623 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:11:48 crc kubenswrapper[4816]: I0216 15:11:48.398522 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:11:48 crc kubenswrapper[4816]: E0216 15:11:48.399162 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:12:03 crc kubenswrapper[4816]: I0216 15:12:03.400078 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:12:03 crc kubenswrapper[4816]: E0216 15:12:03.404870 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:12:15 crc kubenswrapper[4816]: I0216 15:12:15.398528 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:12:15 crc kubenswrapper[4816]: E0216 15:12:15.399313 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:12:30 crc kubenswrapper[4816]: I0216 15:12:30.398876 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:12:30 crc kubenswrapper[4816]: E0216 15:12:30.399679 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:12:45 crc kubenswrapper[4816]: I0216 15:12:45.398725 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2"
Feb 16 15:12:46 crc kubenswrapper[4816]: I0216 15:12:46.445169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"abc3f764bb4b3f381409d82a3515b5121ee9b603be7bb865024b2e6079e576ca"}
Feb 16 15:13:51 crc kubenswrapper[4816]: I0216 15:13:51.093042 4816 generic.go:334] "Generic (PLEG): container finished" podID="d16b3ac3-df67-4ab9-9585-e121d8aababc" containerID="b63a20c8818889d09220fc6d87cd2b8791fb5bcfd57bec5b791aaa0a4d884301" exitCode=0
Feb 16 15:13:51 crc kubenswrapper[4816]: I0216 15:13:51.093137 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-mjw59" event={"ID":"d16b3ac3-df67-4ab9-9585-e121d8aababc","Type":"ContainerDied","Data":"b63a20c8818889d09220fc6d87cd2b8791fb5bcfd57bec5b791aaa0a4d884301"}
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.615484 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-mjw59"
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.710887 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-inventory\") pod \"d16b3ac3-df67-4ab9-9585-e121d8aababc\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") "
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.711227 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ssh-key-openstack-cell1\") pod \"d16b3ac3-df67-4ab9-9585-e121d8aababc\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") "
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.711275 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-secret-0\") pod \"d16b3ac3-df67-4ab9-9585-e121d8aababc\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") "
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.711361 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-combined-ca-bundle\") pod \"d16b3ac3-df67-4ab9-9585-e121d8aababc\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") "
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.711421 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ceph\") pod \"d16b3ac3-df67-4ab9-9585-e121d8aababc\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") "
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.711535 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkgdr\" (UniqueName: \"kubernetes.io/projected/d16b3ac3-df67-4ab9-9585-e121d8aababc-kube-api-access-fkgdr\") pod \"d16b3ac3-df67-4ab9-9585-e121d8aababc\" (UID: \"d16b3ac3-df67-4ab9-9585-e121d8aababc\") "
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.716845 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d16b3ac3-df67-4ab9-9585-e121d8aababc" (UID: "d16b3ac3-df67-4ab9-9585-e121d8aababc"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.717202 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d16b3ac3-df67-4ab9-9585-e121d8aababc-kube-api-access-fkgdr" (OuterVolumeSpecName: "kube-api-access-fkgdr") pod "d16b3ac3-df67-4ab9-9585-e121d8aababc" (UID: "d16b3ac3-df67-4ab9-9585-e121d8aababc"). InnerVolumeSpecName "kube-api-access-fkgdr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.717445 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ceph" (OuterVolumeSpecName: "ceph") pod "d16b3ac3-df67-4ab9-9585-e121d8aababc" (UID: "d16b3ac3-df67-4ab9-9585-e121d8aababc"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.741801 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "d16b3ac3-df67-4ab9-9585-e121d8aababc" (UID: "d16b3ac3-df67-4ab9-9585-e121d8aababc"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.742013 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-inventory" (OuterVolumeSpecName: "inventory") pod "d16b3ac3-df67-4ab9-9585-e121d8aababc" (UID: "d16b3ac3-df67-4ab9-9585-e121d8aababc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.743221 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "d16b3ac3-df67-4ab9-9585-e121d8aababc" (UID: "d16b3ac3-df67-4ab9-9585-e121d8aababc"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.814847 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-inventory\") on node \"crc\" DevicePath \"\""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.814883 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.814895 4816 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.814905 4816 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.814914 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d16b3ac3-df67-4ab9-9585-e121d8aababc-ceph\") on node \"crc\" DevicePath \"\""
Feb 16 15:13:52 crc kubenswrapper[4816]: I0216 15:13:52.814924 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkgdr\" (UniqueName: \"kubernetes.io/projected/d16b3ac3-df67-4ab9-9585-e121d8aababc-kube-api-access-fkgdr\") on node \"crc\" DevicePath \"\""
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.125771 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-mjw59" event={"ID":"d16b3ac3-df67-4ab9-9585-e121d8aababc","Type":"ContainerDied","Data":"e62c39dbf98c49f693894b8bca73468edc31b7f68ef05d2f94ad47c0ea4828b6"}
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.125847 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e62c39dbf98c49f693894b8bca73468edc31b7f68ef05d2f94ad47c0ea4828b6"
Feb 16
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220322 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-sxj25"]
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.220830 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerName="extract-utilities"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220862 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerName="extract-utilities"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.220891 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="extract-content"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220898 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="extract-content"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.220913 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220922 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.220939 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220947 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.220954 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220960 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.220973 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="extract-content"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220979 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="extract-content"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.220988 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d16b3ac3-df67-4ab9-9585-e121d8aababc" containerName="libvirt-openstack-openstack-cell1"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.220995 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d16b3ac3-df67-4ab9-9585-e121d8aababc" containerName="libvirt-openstack-openstack-cell1"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.221009 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="extract-utilities"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.221015 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="extract-utilities"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.221028 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerName="extract-content"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.221034 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerName="extract-content"
Feb 16 15:13:53 crc kubenswrapper[4816]: E0216 15:13:53.221044 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="extract-utilities"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.221050 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="extract-utilities"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.221248 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eef047f-0c37-49ab-8ecc-bbd4173e8e43" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.221261 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d16b3ac3-df67-4ab9-9585-e121d8aababc" containerName="libvirt-openstack-openstack-cell1"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.221269 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dffeb91-4db9-49ec-b8f7-c3f301824a2e" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.221291 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="29e62fcf-7477-48b5-add2-f9ca068f2dca" containerName="registry-server"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.222069 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.224594 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.224631 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.224748 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.224895 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.224972 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.225553 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.232055 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.232769 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-sxj25"]
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.324737 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.324808 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.324826 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.324903 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.324935 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8kmm\" (UniqueName: \"kubernetes.io/projected/17ce2002-16df-42d4-b9c3-e9bc15ad020f-kube-api-access-f8kmm\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.324962 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.325017 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-inventory\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.325061 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.325091 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ceph\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25"
\"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ceph\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.325111 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.325138 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.325308 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-3\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.325361 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-2\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428113 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428374 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ceph\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428432 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428486 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: 
\"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428533 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-3\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428572 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-2\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428681 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428766 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428782 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428810 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428853 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8kmm\" (UniqueName: \"kubernetes.io/projected/17ce2002-16df-42d4-b9c3-e9bc15ad020f-kube-api-access-f8kmm\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428886 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.428942 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-inventory\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.430229 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.429913 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.432935 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ceph\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.433331 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.434187 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.434195 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.434826 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-3\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: 
\"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.439082 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.440729 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.441207 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-2\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.440129 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-inventory\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.442498 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.453469 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8kmm\" (UniqueName: \"kubernetes.io/projected/17ce2002-16df-42d4-b9c3-e9bc15ad020f-kube-api-access-f8kmm\") pod \"nova-cell1-openstack-openstack-cell1-sxj25\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:53 crc kubenswrapper[4816]: I0216 15:13:53.540269 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:13:54 crc kubenswrapper[4816]: I0216 15:13:54.098253 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-sxj25"] Feb 16 15:13:54 crc kubenswrapper[4816]: I0216 15:13:54.104175 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 15:13:54 crc kubenswrapper[4816]: I0216 15:13:54.140193 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" event={"ID":"17ce2002-16df-42d4-b9c3-e9bc15ad020f","Type":"ContainerStarted","Data":"5935d2249d0f38216bea92847e7579f3bf8136c220025a37f4c61578a7078e32"} Feb 16 15:13:55 crc kubenswrapper[4816]: I0216 15:13:55.152932 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" event={"ID":"17ce2002-16df-42d4-b9c3-e9bc15ad020f","Type":"ContainerStarted","Data":"3ef56082feb2047e5f5abceca11fa66d3c8564d836bb139df40edb857d44a735"} Feb 16 15:13:55 crc kubenswrapper[4816]: I0216 15:13:55.173256 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" podStartSLOduration=1.68315531 podStartE2EDuration="2.173217611s" podCreationTimestamp="2026-02-16 15:13:53 +0000 UTC" firstStartedPulling="2026-02-16 15:13:54.103930555 +0000 UTC m=+7833.430644283" lastFinishedPulling="2026-02-16 15:13:54.593992856 +0000 UTC m=+7833.920706584" observedRunningTime="2026-02-16 15:13:55.171419162 +0000 UTC m=+7834.498132900" watchObservedRunningTime="2026-02-16 15:13:55.173217611 +0000 UTC m=+7834.499931339" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.184077 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2"] Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.186316 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.195504 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.195872 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.197669 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2"] Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.272589 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpmrt\" (UniqueName: \"kubernetes.io/projected/a85fa645-afdd-4783-a81c-a88a4602206c-kube-api-access-wpmrt\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.272845 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a85fa645-afdd-4783-a81c-a88a4602206c-config-volume\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.272957 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a85fa645-afdd-4783-a81c-a88a4602206c-secret-volume\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.375239 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a85fa645-afdd-4783-a81c-a88a4602206c-config-volume\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.375360 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a85fa645-afdd-4783-a81c-a88a4602206c-secret-volume\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.375509 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpmrt\" (UniqueName: \"kubernetes.io/projected/a85fa645-afdd-4783-a81c-a88a4602206c-kube-api-access-wpmrt\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.376475 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a85fa645-afdd-4783-a81c-a88a4602206c-config-volume\") pod 
\"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.385115 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a85fa645-afdd-4783-a81c-a88a4602206c-secret-volume\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.399443 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpmrt\" (UniqueName: \"kubernetes.io/projected/a85fa645-afdd-4783-a81c-a88a4602206c-kube-api-access-wpmrt\") pod \"collect-profiles-29520915-qv5g2\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.524052 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:00 crc kubenswrapper[4816]: I0216 15:15:00.993275 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2"] Feb 16 15:15:01 crc kubenswrapper[4816]: I0216 15:15:01.482763 4816 generic.go:334] "Generic (PLEG): container finished" podID="a85fa645-afdd-4783-a81c-a88a4602206c" containerID="25bba3b8379b982d5952c62b43eeb92413e6e259da4234fa2661099efdda7a66" exitCode=0 Feb 16 15:15:01 crc kubenswrapper[4816]: I0216 15:15:01.482841 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" event={"ID":"a85fa645-afdd-4783-a81c-a88a4602206c","Type":"ContainerDied","Data":"25bba3b8379b982d5952c62b43eeb92413e6e259da4234fa2661099efdda7a66"} Feb 16 15:15:01 crc kubenswrapper[4816]: I0216 15:15:01.483056 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" event={"ID":"a85fa645-afdd-4783-a81c-a88a4602206c","Type":"ContainerStarted","Data":"52569d578f90cd2e0e195a3a3775187acffa3ae842bb0927f795e0e30780805c"} Feb 16 15:15:01 crc kubenswrapper[4816]: E0216 15:15:01.559471 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda85fa645_afdd_4783_a81c_a88a4602206c.slice/crio-25bba3b8379b982d5952c62b43eeb92413e6e259da4234fa2661099efdda7a66.scope\": RecentStats: unable to find data in memory cache]" Feb 16 15:15:02 crc kubenswrapper[4816]: I0216 15:15:02.858449 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.033800 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a85fa645-afdd-4783-a81c-a88a4602206c-config-volume\") pod \"a85fa645-afdd-4783-a81c-a88a4602206c\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.033868 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpmrt\" (UniqueName: \"kubernetes.io/projected/a85fa645-afdd-4783-a81c-a88a4602206c-kube-api-access-wpmrt\") pod \"a85fa645-afdd-4783-a81c-a88a4602206c\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.034285 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a85fa645-afdd-4783-a81c-a88a4602206c-secret-volume\") pod \"a85fa645-afdd-4783-a81c-a88a4602206c\" (UID: \"a85fa645-afdd-4783-a81c-a88a4602206c\") " Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.034940 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a85fa645-afdd-4783-a81c-a88a4602206c-config-volume" (OuterVolumeSpecName: "config-volume") pod "a85fa645-afdd-4783-a81c-a88a4602206c" (UID: "a85fa645-afdd-4783-a81c-a88a4602206c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.035262 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a85fa645-afdd-4783-a81c-a88a4602206c-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.041907 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a85fa645-afdd-4783-a81c-a88a4602206c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a85fa645-afdd-4783-a81c-a88a4602206c" (UID: "a85fa645-afdd-4783-a81c-a88a4602206c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.041978 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a85fa645-afdd-4783-a81c-a88a4602206c-kube-api-access-wpmrt" (OuterVolumeSpecName: "kube-api-access-wpmrt") pod "a85fa645-afdd-4783-a81c-a88a4602206c" (UID: "a85fa645-afdd-4783-a81c-a88a4602206c"). InnerVolumeSpecName "kube-api-access-wpmrt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.137902 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a85fa645-afdd-4783-a81c-a88a4602206c-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.137943 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpmrt\" (UniqueName: \"kubernetes.io/projected/a85fa645-afdd-4783-a81c-a88a4602206c-kube-api-access-wpmrt\") on node \"crc\" DevicePath \"\"" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.504100 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" event={"ID":"a85fa645-afdd-4783-a81c-a88a4602206c","Type":"ContainerDied","Data":"52569d578f90cd2e0e195a3a3775187acffa3ae842bb0927f795e0e30780805c"} Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.504395 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52569d578f90cd2e0e195a3a3775187acffa3ae842bb0927f795e0e30780805c" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.504175 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520915-qv5g2" Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.944889 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"] Feb 16 15:15:03 crc kubenswrapper[4816]: I0216 15:15:03.953737 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520870-kqrnx"] Feb 16 15:15:05 crc kubenswrapper[4816]: I0216 15:15:05.419668 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a578cf17-945b-4b1e-a4a1-3b14c35d4115" path="/var/lib/kubelet/pods/a578cf17-945b-4b1e-a4a1-3b14c35d4115/volumes" Feb 16 15:15:06 crc kubenswrapper[4816]: I0216 15:15:06.940922 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:15:06 crc kubenswrapper[4816]: I0216 15:15:06.941265 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:15:33 crc kubenswrapper[4816]: I0216 15:15:33.442711 4816 scope.go:117] "RemoveContainer" containerID="0a4df06fbeb1564cef27a8a31a5c0e168c5cb5ee0964133883ea23ab4452e508" Feb 16 15:15:36 crc kubenswrapper[4816]: I0216 15:15:36.941299 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:15:36 crc kubenswrapper[4816]: I0216 15:15:36.941908 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:16:06 crc kubenswrapper[4816]: I0216 15:16:06.940794 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:16:06 crc kubenswrapper[4816]: I0216 15:16:06.941208 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:16:06 crc kubenswrapper[4816]: I0216 15:16:06.941254 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 15:16:06 crc kubenswrapper[4816]: I0216 15:16:06.942147 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"abc3f764bb4b3f381409d82a3515b5121ee9b603be7bb865024b2e6079e576ca"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 15:16:06 crc kubenswrapper[4816]: I0216 15:16:06.942225 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://abc3f764bb4b3f381409d82a3515b5121ee9b603be7bb865024b2e6079e576ca" gracePeriod=600 Feb 16 15:16:07 crc kubenswrapper[4816]: I0216 15:16:07.215290 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="abc3f764bb4b3f381409d82a3515b5121ee9b603be7bb865024b2e6079e576ca" exitCode=0 Feb 16 15:16:07 crc kubenswrapper[4816]: I0216 15:16:07.215353 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"abc3f764bb4b3f381409d82a3515b5121ee9b603be7bb865024b2e6079e576ca"} Feb 16 15:16:07 crc kubenswrapper[4816]: I0216 15:16:07.215698 4816 scope.go:117] "RemoveContainer" containerID="f3b615f7da42909612a48868086b7cf72e4553e1f343840ecdb20393dd3b86f2" Feb 16 15:16:08 crc kubenswrapper[4816]: I0216 15:16:08.228531 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3"} Feb 16 15:16:49 crc kubenswrapper[4816]: I0216 15:16:49.669159 4816 generic.go:334] "Generic (PLEG): container finished" podID="17ce2002-16df-42d4-b9c3-e9bc15ad020f" containerID="3ef56082feb2047e5f5abceca11fa66d3c8564d836bb139df40edb857d44a735" exitCode=0 Feb 16 15:16:49 crc kubenswrapper[4816]: I0216 15:16:49.669279 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" 
event={"ID":"17ce2002-16df-42d4-b9c3-e9bc15ad020f","Type":"ContainerDied","Data":"3ef56082feb2047e5f5abceca11fa66d3c8564d836bb139df40edb857d44a735"} Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.225218 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393076 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-combined-ca-bundle\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393432 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-inventory\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393465 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8kmm\" (UniqueName: \"kubernetes.io/projected/17ce2002-16df-42d4-b9c3-e9bc15ad020f-kube-api-access-f8kmm\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393529 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ssh-key-openstack-cell1\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393586 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-2\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393621 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-1\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393674 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-0\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393721 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-3\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393783 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ceph\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393829 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-1\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393885 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-1\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393936 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-0\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.393973 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-0\") pod \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\" (UID: \"17ce2002-16df-42d4-b9c3-e9bc15ad020f\") " Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.403771 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17ce2002-16df-42d4-b9c3-e9bc15ad020f-kube-api-access-f8kmm" (OuterVolumeSpecName: "kube-api-access-f8kmm") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "kube-api-access-f8kmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.410436 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.418518 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ceph" (OuterVolumeSpecName: "ceph") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.426917 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.428394 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.430987 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.431235 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.432153 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-inventory" (OuterVolumeSpecName: "inventory") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.435935 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.438402 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.442189 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-cells-global-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.443749 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-3" (OuterVolumeSpecName: "nova-cell1-compute-config-3") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-cell1-compute-config-3". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.452782 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-2" (OuterVolumeSpecName: "nova-cell1-compute-config-2") pod "17ce2002-16df-42d4-b9c3-e9bc15ad020f" (UID: "17ce2002-16df-42d4-b9c3-e9bc15ad020f"). InnerVolumeSpecName "nova-cell1-compute-config-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496883 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496918 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496930 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-3\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496941 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496952 4816 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496961 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496969 4816 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496977 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496986 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.496995 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.497004 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8kmm\" (UniqueName: \"kubernetes.io/projected/17ce2002-16df-42d4-b9c3-e9bc15ad020f-kube-api-access-f8kmm\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.497014 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.497022 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/17ce2002-16df-42d4-b9c3-e9bc15ad020f-nova-cell1-compute-config-2\") on node \"crc\" DevicePath \"\"" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.704642 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" event={"ID":"17ce2002-16df-42d4-b9c3-e9bc15ad020f","Type":"ContainerDied","Data":"5935d2249d0f38216bea92847e7579f3bf8136c220025a37f4c61578a7078e32"} Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.704769 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5935d2249d0f38216bea92847e7579f3bf8136c220025a37f4c61578a7078e32" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.704734 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sxj25" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.803104 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-xnj4h"] Feb 16 15:16:51 crc kubenswrapper[4816]: E0216 15:16:51.803639 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17ce2002-16df-42d4-b9c3-e9bc15ad020f" containerName="nova-cell1-openstack-openstack-cell1" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.803673 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="17ce2002-16df-42d4-b9c3-e9bc15ad020f" containerName="nova-cell1-openstack-openstack-cell1" Feb 16 15:16:51 crc kubenswrapper[4816]: E0216 15:16:51.803704 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a85fa645-afdd-4783-a81c-a88a4602206c" containerName="collect-profiles" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.803710 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a85fa645-afdd-4783-a81c-a88a4602206c" containerName="collect-profiles" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.804087 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="17ce2002-16df-42d4-b9c3-e9bc15ad020f" containerName="nova-cell1-openstack-openstack-cell1" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.804107 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a85fa645-afdd-4783-a81c-a88a4602206c" containerName="collect-profiles" Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.805223 4816 util.go:30] "No sandbox for pod can be found. 
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.808812 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.809083 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.809221 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.809356 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.809467 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.825882 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-xnj4h"]
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.908226 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.908276 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.908458 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-inventory\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.908760 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.908850 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcq7r\" (UniqueName: \"kubernetes.io/projected/d247672c-f503-4d85-a33e-b01084c23db5-kube-api-access-pcq7r\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.909051 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.909140 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:51 crc kubenswrapper[4816]: I0216 15:16:51.909266 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceph\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012478 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012576 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012681 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceph\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012802 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012826 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012870 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-inventory\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012940 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.012982 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcq7r\" (UniqueName: \"kubernetes.io/projected/d247672c-f503-4d85-a33e-b01084c23db5-kube-api-access-pcq7r\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.016217 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.016525 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.016852 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.018138 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.018726 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-inventory\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.019578 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceph\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.020354 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.031942 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcq7r\" (UniqueName: \"kubernetes.io/projected/d247672c-f503-4d85-a33e-b01084c23db5-kube-api-access-pcq7r\") pod \"telemetry-openstack-openstack-cell1-xnj4h\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.132287 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h"
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.664545 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-xnj4h"]
Feb 16 15:16:52 crc kubenswrapper[4816]: I0216 15:16:52.718535 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h" event={"ID":"d247672c-f503-4d85-a33e-b01084c23db5","Type":"ContainerStarted","Data":"86451be08f7d359357babb1c28324a42f39f4d5314bd0b0f0e457bf60a2cf5fa"}
Feb 16 15:16:53 crc kubenswrapper[4816]: I0216 15:16:53.729420 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h" event={"ID":"d247672c-f503-4d85-a33e-b01084c23db5","Type":"ContainerStarted","Data":"a5d0ac1d4e03bf51f1d6974039e7c25d9fc15d9e75c877de95073c11521806ce"}
Feb 16 15:16:53 crc kubenswrapper[4816]: I0216 15:16:53.758241 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h" podStartSLOduration=2.29247284 podStartE2EDuration="2.75817928s" podCreationTimestamp="2026-02-16 15:16:51 +0000 UTC" firstStartedPulling="2026-02-16 15:16:52.673098444 +0000 UTC m=+8011.999812172" lastFinishedPulling="2026-02-16 15:16:53.138804894 +0000 UTC m=+8012.465518612" observedRunningTime="2026-02-16 15:16:53.746391509 +0000 UTC m=+8013.073105237" watchObservedRunningTime="2026-02-16 15:16:53.75817928 +0000 UTC m=+8013.084893008"
Feb 16 15:18:36 crc kubenswrapper[4816]: I0216 15:18:36.940718 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 15:18:36 crc kubenswrapper[4816]: I0216 15:18:36.941219 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 15:19:06 crc kubenswrapper[4816]: I0216 15:19:06.940691 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 15:19:06 crc kubenswrapper[4816]: I0216 15:19:06.941380 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 15:19:36 crc kubenswrapper[4816]: I0216 15:19:36.941077 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 15:19:36 crc kubenswrapper[4816]: I0216 15:19:36.941700 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 15:19:36 crc kubenswrapper[4816]: I0216 15:19:36.941774 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc"
Feb 16 15:19:36 crc kubenswrapper[4816]: I0216 15:19:36.942786 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 16 15:19:36 crc kubenswrapper[4816]: I0216 15:19:36.942907 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" gracePeriod=600
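These entries correspond to an HTTP liveness probe against 127.0.0.1:8798/health that fails every 30 seconds until the kubelet decides to restart the container. A sketch of the kind of probe definition that would produce them, using the k8s.io/api types (the `ProbeHandler` field name applies to recent API versions, and the timing values are assumptions inferred from the log, not read from the actual manifest):

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

// livenessProbe sketches a probe matching the failing GET in the log:
// http://127.0.0.1:8798/health. The log shows failures 30s apart and a
// restart decision after the third consecutive failure.
var livenessProbe = &corev1.Probe{
	ProbeHandler: corev1.ProbeHandler{
		HTTPGet: &corev1.HTTPGetAction{
			Host: "127.0.0.1",
			Path: "/health",
			Port: intstr.FromInt(8798),
		},
	},
	PeriodSeconds:    30, // assumed from the 30s spacing of failures
	FailureThreshold: 3,  // assumed from three failures preceding "will be restarted"
}

func main() {
	fmt.Printf("probe: http://%s:%d%s\n",
		livenessProbe.HTTPGet.Host, livenessProbe.HTTPGet.Port.IntValue(), livenessProbe.HTTPGet.Path)
}
```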
containerID="abc3f764bb4b3f381409d82a3515b5121ee9b603be7bb865024b2e6079e576ca" Feb 16 15:19:38 crc kubenswrapper[4816]: I0216 15:19:38.049791 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:19:38 crc kubenswrapper[4816]: E0216 15:19:38.050746 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.760910 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6tb27"] Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.763829 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.772842 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6tb27"] Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.889351 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-catalog-content\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.889547 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr5qr\" (UniqueName: \"kubernetes.io/projected/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-kube-api-access-tr5qr\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.889628 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-utilities\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.992257 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-catalog-content\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.992752 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr5qr\" (UniqueName: \"kubernetes.io/projected/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-kube-api-access-tr5qr\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.992954 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-utilities\") pod 
\"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.993081 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-catalog-content\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:48 crc kubenswrapper[4816]: I0216 15:19:48.993514 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-utilities\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:49 crc kubenswrapper[4816]: I0216 15:19:49.019503 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr5qr\" (UniqueName: \"kubernetes.io/projected/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-kube-api-access-tr5qr\") pod \"redhat-operators-6tb27\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") " pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:49 crc kubenswrapper[4816]: I0216 15:19:49.084394 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:49 crc kubenswrapper[4816]: I0216 15:19:49.404358 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:19:49 crc kubenswrapper[4816]: E0216 15:19:49.407218 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:19:49 crc kubenswrapper[4816]: I0216 15:19:49.632267 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6tb27"] Feb 16 15:19:50 crc kubenswrapper[4816]: I0216 15:19:50.169136 4816 generic.go:334] "Generic (PLEG): container finished" podID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerID="b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be" exitCode=0 Feb 16 15:19:50 crc kubenswrapper[4816]: I0216 15:19:50.169244 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6tb27" event={"ID":"a8591e73-aed8-4503-88b8-7ae5ae8a61ca","Type":"ContainerDied","Data":"b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be"} Feb 16 15:19:50 crc kubenswrapper[4816]: I0216 15:19:50.171553 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6tb27" event={"ID":"a8591e73-aed8-4503-88b8-7ae5ae8a61ca","Type":"ContainerStarted","Data":"fd973fd97b4edb74fbdce09500fd67b733d6844dc75d9b53a6956f4d64b89490"} Feb 16 15:19:50 crc kubenswrapper[4816]: I0216 15:19:50.171572 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 15:19:51 crc kubenswrapper[4816]: I0216 15:19:51.342040 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6tb27" 
event={"ID":"a8591e73-aed8-4503-88b8-7ae5ae8a61ca","Type":"ContainerStarted","Data":"b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c"} Feb 16 15:19:56 crc kubenswrapper[4816]: I0216 15:19:56.398191 4816 generic.go:334] "Generic (PLEG): container finished" podID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerID="b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c" exitCode=0 Feb 16 15:19:56 crc kubenswrapper[4816]: I0216 15:19:56.398763 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6tb27" event={"ID":"a8591e73-aed8-4503-88b8-7ae5ae8a61ca","Type":"ContainerDied","Data":"b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c"} Feb 16 15:19:57 crc kubenswrapper[4816]: I0216 15:19:57.415062 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6tb27" event={"ID":"a8591e73-aed8-4503-88b8-7ae5ae8a61ca","Type":"ContainerStarted","Data":"4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9"} Feb 16 15:19:57 crc kubenswrapper[4816]: I0216 15:19:57.460040 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6tb27" podStartSLOduration=2.832840094 podStartE2EDuration="9.460001258s" podCreationTimestamp="2026-02-16 15:19:48 +0000 UTC" firstStartedPulling="2026-02-16 15:19:50.171300172 +0000 UTC m=+8189.498013890" lastFinishedPulling="2026-02-16 15:19:56.798461336 +0000 UTC m=+8196.125175054" observedRunningTime="2026-02-16 15:19:57.446487461 +0000 UTC m=+8196.773201229" watchObservedRunningTime="2026-02-16 15:19:57.460001258 +0000 UTC m=+8196.786714986" Feb 16 15:19:59 crc kubenswrapper[4816]: I0216 15:19:59.085274 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:19:59 crc kubenswrapper[4816]: I0216 15:19:59.085608 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6tb27" Feb 16 15:20:00 crc kubenswrapper[4816]: I0216 15:20:00.143034 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6tb27" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="registry-server" probeResult="failure" output=< Feb 16 15:20:00 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:20:00 crc kubenswrapper[4816]: > Feb 16 15:20:02 crc kubenswrapper[4816]: I0216 15:20:02.399456 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:20:02 crc kubenswrapper[4816]: E0216 15:20:02.400054 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.141069 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-48llc"] Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.146223 4816 util.go:30] "No sandbox for pod can be found. 
Feb 16 15:20:02 crc kubenswrapper[4816]: I0216 15:20:02.399456 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3"
Feb 16 15:20:02 crc kubenswrapper[4816]: E0216 15:20:02.400054 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.141069 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-48llc"]
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.146223 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.157791 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-48llc"]
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.233904 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-catalog-content\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.234034 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-utilities\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.234108 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvcqf\" (UniqueName: \"kubernetes.io/projected/300a5482-db46-4c1e-8a09-caa45d558877-kube-api-access-cvcqf\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.335806 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-catalog-content\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.335931 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-utilities\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.335999 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvcqf\" (UniqueName: \"kubernetes.io/projected/300a5482-db46-4c1e-8a09-caa45d558877-kube-api-access-cvcqf\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.336829 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-catalog-content\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.336889 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-utilities\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.361344 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvcqf\" (UniqueName: \"kubernetes.io/projected/300a5482-db46-4c1e-8a09-caa45d558877-kube-api-access-cvcqf\") pod \"redhat-marketplace-48llc\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") " pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:07 crc kubenswrapper[4816]: I0216 15:20:07.475569 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:08 crc kubenswrapper[4816]: I0216 15:20:08.206489 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-48llc"]
Feb 16 15:20:08 crc kubenswrapper[4816]: I0216 15:20:08.637200 4816 generic.go:334] "Generic (PLEG): container finished" podID="300a5482-db46-4c1e-8a09-caa45d558877" containerID="713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d" exitCode=0
Feb 16 15:20:08 crc kubenswrapper[4816]: I0216 15:20:08.637256 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48llc" event={"ID":"300a5482-db46-4c1e-8a09-caa45d558877","Type":"ContainerDied","Data":"713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d"}
Feb 16 15:20:08 crc kubenswrapper[4816]: I0216 15:20:08.637536 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48llc" event={"ID":"300a5482-db46-4c1e-8a09-caa45d558877","Type":"ContainerStarted","Data":"dd81f313f4d2a32058365af50e4aaccded784fd32470318f849e7a010574cff5"}
Feb 16 15:20:09 crc kubenswrapper[4816]: I0216 15:20:09.177904 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6tb27"
Feb 16 15:20:09 crc kubenswrapper[4816]: I0216 15:20:09.233834 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6tb27"
Feb 16 15:20:09 crc kubenswrapper[4816]: I0216 15:20:09.649149 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48llc" event={"ID":"300a5482-db46-4c1e-8a09-caa45d558877","Type":"ContainerStarted","Data":"9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b"}
Feb 16 15:20:10 crc kubenswrapper[4816]: I0216 15:20:10.662342 4816 generic.go:334] "Generic (PLEG): container finished" podID="300a5482-db46-4c1e-8a09-caa45d558877" containerID="9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b" exitCode=0
Feb 16 15:20:10 crc kubenswrapper[4816]: I0216 15:20:10.662508 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48llc" event={"ID":"300a5482-db46-4c1e-8a09-caa45d558877","Type":"ContainerDied","Data":"9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b"}
Feb 16 15:20:11 crc kubenswrapper[4816]: I0216 15:20:11.497400 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6tb27"]
Feb 16 15:20:11 crc kubenswrapper[4816]: I0216 15:20:11.497999 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6tb27" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="registry-server" containerID="cri-o://4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9" gracePeriod=2
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.524630 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6tb27"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.537776 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-catalog-content\") pod \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") "
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.537838 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr5qr\" (UniqueName: \"kubernetes.io/projected/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-kube-api-access-tr5qr\") pod \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") "
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.537916 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-utilities\") pod \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\" (UID: \"a8591e73-aed8-4503-88b8-7ae5ae8a61ca\") "
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.539038 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-utilities" (OuterVolumeSpecName: "utilities") pod "a8591e73-aed8-4503-88b8-7ae5ae8a61ca" (UID: "a8591e73-aed8-4503-88b8-7ae5ae8a61ca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.546468 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-kube-api-access-tr5qr" (OuterVolumeSpecName: "kube-api-access-tr5qr") pod "a8591e73-aed8-4503-88b8-7ae5ae8a61ca" (UID: "a8591e73-aed8-4503-88b8-7ae5ae8a61ca"). InnerVolumeSpecName "kube-api-access-tr5qr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.639502 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr5qr\" (UniqueName: \"kubernetes.io/projected/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-kube-api-access-tr5qr\") on node \"crc\" DevicePath \"\""
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.639814 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.682099 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6tb27"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.682127 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6tb27" event={"ID":"a8591e73-aed8-4503-88b8-7ae5ae8a61ca","Type":"ContainerDied","Data":"4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9"}
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.682175 4816 scope.go:117] "RemoveContainer" containerID="4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.682081 4816 generic.go:334] "Generic (PLEG): container finished" podID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerID="4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9" exitCode=0
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.682394 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6tb27" event={"ID":"a8591e73-aed8-4503-88b8-7ae5ae8a61ca","Type":"ContainerDied","Data":"fd973fd97b4edb74fbdce09500fd67b733d6844dc75d9b53a6956f4d64b89490"}
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.687422 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48llc" event={"ID":"300a5482-db46-4c1e-8a09-caa45d558877","Type":"ContainerStarted","Data":"23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101"}
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.692550 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8591e73-aed8-4503-88b8-7ae5ae8a61ca" (UID: "a8591e73-aed8-4503-88b8-7ae5ae8a61ca"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.703050 4816 scope.go:117] "RemoveContainer" containerID="b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.733573 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-48llc" podStartSLOduration=3.077413524 podStartE2EDuration="5.733549164s" podCreationTimestamp="2026-02-16 15:20:07 +0000 UTC" firstStartedPulling="2026-02-16 15:20:08.63922706 +0000 UTC m=+8207.965940788" lastFinishedPulling="2026-02-16 15:20:11.2953627 +0000 UTC m=+8210.622076428" observedRunningTime="2026-02-16 15:20:12.711013061 +0000 UTC m=+8212.037726789" watchObservedRunningTime="2026-02-16 15:20:12.733549164 +0000 UTC m=+8212.060262892"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.742007 4816 scope.go:117] "RemoveContainer" containerID="b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.748872 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8591e73-aed8-4503-88b8-7ae5ae8a61ca-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.794986 4816 scope.go:117] "RemoveContainer" containerID="4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9"
Feb 16 15:20:12 crc kubenswrapper[4816]: E0216 15:20:12.795547 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9\": container with ID starting with 4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9 not found: ID does not exist" containerID="4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.796105 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9"} err="failed to get container status \"4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9\": rpc error: code = NotFound desc = could not find container \"4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9\": container with ID starting with 4db0a8e83798843327541ec56cfa59ff4ba05ce904529c83ca7e78c63af2b2d9 not found: ID does not exist"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.796136 4816 scope.go:117] "RemoveContainer" containerID="b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c"
Feb 16 15:20:12 crc kubenswrapper[4816]: E0216 15:20:12.796581 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c\": container with ID starting with b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c not found: ID does not exist" containerID="b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.796624 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c"} err="failed to get container status \"b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c\": rpc error: code = NotFound desc = could not find container \"b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c\": container with ID starting with b9548134e9e183bb330ae27ef028b191396a47e6b2ee846d9acc293d4e31165c not found: ID does not exist"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.796645 4816 scope.go:117] "RemoveContainer" containerID="b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be"
Feb 16 15:20:12 crc kubenswrapper[4816]: E0216 15:20:12.797015 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be\": container with ID starting with b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be not found: ID does not exist" containerID="b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be"
Feb 16 15:20:12 crc kubenswrapper[4816]: I0216 15:20:12.797041 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be"} err="failed to get container status \"b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be\": rpc error: code = NotFound desc = could not find container \"b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be\": container with ID starting with b853a2decbe8e641fd0e009016a0559ddae2844063716e50e193820f371285be not found: ID does not exist"
Feb 16 15:20:13 crc kubenswrapper[4816]: I0216 15:20:13.021374 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6tb27"]
Feb 16 15:20:13 crc kubenswrapper[4816]: I0216 15:20:13.036341 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6tb27"]
Feb 16 15:20:13 crc kubenswrapper[4816]: I0216 15:20:13.415810 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" path="/var/lib/kubelet/pods/a8591e73-aed8-4503-88b8-7ae5ae8a61ca/volumes"
Feb 16 15:20:15 crc kubenswrapper[4816]: I0216 15:20:15.399022 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3"
Feb 16 15:20:15 crc kubenswrapper[4816]: E0216 15:20:15.399873 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:20:17 crc kubenswrapper[4816]: I0216 15:20:17.629369 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:17 crc kubenswrapper[4816]: I0216 15:20:17.632424 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:17 crc kubenswrapper[4816]: I0216 15:20:17.691455 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:17 crc kubenswrapper[4816]: I0216 15:20:17.790055 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-48llc"
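The RemoveContainer / "ContainerStatus ... NotFound" pairs above are a benign race: the container is already gone by the time the deletor re-queries its status. Cleanup code typically treats NotFound as success; a sketch of that idempotent handling using gRPC status codes (the `remove` callback stands in for a runtime client and is hypothetical):

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer sketches idempotent cleanup: a NotFound from the runtime
// means the container is already deleted, so it is not treated as a failure.
func removeContainer(remove func(id string) error, id string) error {
	if err := remove(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already gone; treating as removed\n", id)
			return nil
		}
		return err
	}
	return nil
}

func main() {
	fake := func(id string) error { // simulates the NotFound seen in the log
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	_ = removeContainer(fake, "4db0a8e8")
}
```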
Feb 16 15:20:17 crc kubenswrapper[4816]: I0216 15:20:17.929087 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-48llc"]
Feb 16 15:20:19 crc kubenswrapper[4816]: I0216 15:20:19.759412 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-48llc" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="registry-server" containerID="cri-o://23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101" gracePeriod=2
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.289127 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.388559 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-catalog-content\") pod \"300a5482-db46-4c1e-8a09-caa45d558877\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") "
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.388785 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvcqf\" (UniqueName: \"kubernetes.io/projected/300a5482-db46-4c1e-8a09-caa45d558877-kube-api-access-cvcqf\") pod \"300a5482-db46-4c1e-8a09-caa45d558877\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") "
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.388819 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-utilities\") pod \"300a5482-db46-4c1e-8a09-caa45d558877\" (UID: \"300a5482-db46-4c1e-8a09-caa45d558877\") "
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.390084 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-utilities" (OuterVolumeSpecName: "utilities") pod "300a5482-db46-4c1e-8a09-caa45d558877" (UID: "300a5482-db46-4c1e-8a09-caa45d558877"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.410582 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/300a5482-db46-4c1e-8a09-caa45d558877-kube-api-access-cvcqf" (OuterVolumeSpecName: "kube-api-access-cvcqf") pod "300a5482-db46-4c1e-8a09-caa45d558877" (UID: "300a5482-db46-4c1e-8a09-caa45d558877"). InnerVolumeSpecName "kube-api-access-cvcqf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.422039 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "300a5482-db46-4c1e-8a09-caa45d558877" (UID: "300a5482-db46-4c1e-8a09-caa45d558877"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.491426 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvcqf\" (UniqueName: \"kubernetes.io/projected/300a5482-db46-4c1e-8a09-caa45d558877-kube-api-access-cvcqf\") on node \"crc\" DevicePath \"\""
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.491461 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.491477 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300a5482-db46-4c1e-8a09-caa45d558877-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.774835 4816 generic.go:334] "Generic (PLEG): container finished" podID="300a5482-db46-4c1e-8a09-caa45d558877" containerID="23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101" exitCode=0
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.774889 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48llc" event={"ID":"300a5482-db46-4c1e-8a09-caa45d558877","Type":"ContainerDied","Data":"23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101"}
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.774934 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48llc" event={"ID":"300a5482-db46-4c1e-8a09-caa45d558877","Type":"ContainerDied","Data":"dd81f313f4d2a32058365af50e4aaccded784fd32470318f849e7a010574cff5"}
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.774959 4816 scope.go:117] "RemoveContainer" containerID="23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.776775 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48llc"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.812697 4816 scope.go:117] "RemoveContainer" containerID="9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.828801 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-48llc"]
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.842247 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-48llc"]
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.850813 4816 scope.go:117] "RemoveContainer" containerID="713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.900915 4816 scope.go:117] "RemoveContainer" containerID="23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101"
Feb 16 15:20:20 crc kubenswrapper[4816]: E0216 15:20:20.901635 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101\": container with ID starting with 23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101 not found: ID does not exist" containerID="23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.901694 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101"} err="failed to get container status \"23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101\": rpc error: code = NotFound desc = could not find container \"23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101\": container with ID starting with 23271d10e03774d0787dd58d2a0ae004defb2be991ef84384db0858ced3d5101 not found: ID does not exist"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.901746 4816 scope.go:117] "RemoveContainer" containerID="9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b"
Feb 16 15:20:20 crc kubenswrapper[4816]: E0216 15:20:20.902209 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b\": container with ID starting with 9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b not found: ID does not exist" containerID="9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.902245 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b"} err="failed to get container status \"9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b\": rpc error: code = NotFound desc = could not find container \"9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b\": container with ID starting with 9f69ebeee0a96dd4404e3a4e6933ae8ab307f69061538289f6b6aefb8f7ce33b not found: ID does not exist"
Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.902266 4816 scope.go:117] "RemoveContainer" containerID="713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d"
Feb 16 15:20:20 crc kubenswrapper[4816]: E0216 15:20:20.902699 4816 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d\": container with ID starting with 713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d not found: ID does not exist" containerID="713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d" Feb 16 15:20:20 crc kubenswrapper[4816]: I0216 15:20:20.902804 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d"} err="failed to get container status \"713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d\": rpc error: code = NotFound desc = could not find container \"713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d\": container with ID starting with 713cb6d350ca8c2cb17267934c1ba5b78128fbe4e9d56b036129bca56f5a7b2d not found: ID does not exist" Feb 16 15:20:21 crc kubenswrapper[4816]: I0216 15:20:21.437483 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="300a5482-db46-4c1e-8a09-caa45d558877" path="/var/lib/kubelet/pods/300a5482-db46-4c1e-8a09-caa45d558877/volumes" Feb 16 15:20:21 crc kubenswrapper[4816]: I0216 15:20:21.790198 4816 generic.go:334] "Generic (PLEG): container finished" podID="d247672c-f503-4d85-a33e-b01084c23db5" containerID="a5d0ac1d4e03bf51f1d6974039e7c25d9fc15d9e75c877de95073c11521806ce" exitCode=0 Feb 16 15:20:21 crc kubenswrapper[4816]: I0216 15:20:21.790240 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h" event={"ID":"d247672c-f503-4d85-a33e-b01084c23db5","Type":"ContainerDied","Data":"a5d0ac1d4e03bf51f1d6974039e7c25d9fc15d9e75c877de95073c11521806ce"} Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.291151 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462221 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-0\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462381 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceph\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462461 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ssh-key-openstack-cell1\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462500 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-inventory\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462527 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-2\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462573 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-telemetry-combined-ca-bundle\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462617 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-1\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.462633 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcq7r\" (UniqueName: \"kubernetes.io/projected/d247672c-f503-4d85-a33e-b01084c23db5-kube-api-access-pcq7r\") pod \"d247672c-f503-4d85-a33e-b01084c23db5\" (UID: \"d247672c-f503-4d85-a33e-b01084c23db5\") " Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.469767 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d247672c-f503-4d85-a33e-b01084c23db5-kube-api-access-pcq7r" (OuterVolumeSpecName: "kube-api-access-pcq7r") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "kube-api-access-pcq7r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.470857 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.477893 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceph" (OuterVolumeSpecName: "ceph") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.493884 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.495886 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.497562 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.500648 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.507961 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-inventory" (OuterVolumeSpecName: "inventory") pod "d247672c-f503-4d85-a33e-b01084c23db5" (UID: "d247672c-f503-4d85-a33e-b01084c23db5"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.564918 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.564954 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.564965 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.564975 4816 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.564983 4816 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.564993 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcq7r\" (UniqueName: \"kubernetes.io/projected/d247672c-f503-4d85-a33e-b01084c23db5-kube-api-access-pcq7r\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.565001 4816 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.565010 4816 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d247672c-f503-4d85-a33e-b01084c23db5-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.813349 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h" event={"ID":"d247672c-f503-4d85-a33e-b01084c23db5","Type":"ContainerDied","Data":"86451be08f7d359357babb1c28324a42f39f4d5314bd0b0f0e457bf60a2cf5fa"} Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.813395 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86451be08f7d359357babb1c28324a42f39f4d5314bd0b0f0e457bf60a2cf5fa" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.813796 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-xnj4h" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.933821 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-pzqvv"] Feb 16 15:20:23 crc kubenswrapper[4816]: E0216 15:20:23.934633 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="extract-utilities" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.934808 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="extract-utilities" Feb 16 15:20:23 crc kubenswrapper[4816]: E0216 15:20:23.934841 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="registry-server" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.934851 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="registry-server" Feb 16 15:20:23 crc kubenswrapper[4816]: E0216 15:20:23.934875 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="extract-content" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.935059 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="extract-content" Feb 16 15:20:23 crc kubenswrapper[4816]: E0216 15:20:23.935077 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="extract-utilities" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.935086 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="extract-utilities" Feb 16 15:20:23 crc kubenswrapper[4816]: E0216 15:20:23.935109 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="registry-server" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.935120 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="registry-server" Feb 16 15:20:23 crc kubenswrapper[4816]: E0216 15:20:23.935141 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d247672c-f503-4d85-a33e-b01084c23db5" containerName="telemetry-openstack-openstack-cell1" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.935152 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="d247672c-f503-4d85-a33e-b01084c23db5" containerName="telemetry-openstack-openstack-cell1" Feb 16 15:20:23 crc kubenswrapper[4816]: E0216 15:20:23.935174 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="extract-content" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.935185 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="extract-content" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.935458 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="300a5482-db46-4c1e-8a09-caa45d558877" containerName="registry-server" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.935496 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8591e73-aed8-4503-88b8-7ae5ae8a61ca" containerName="registry-server" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 
15:20:23.935517 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d247672c-f503-4d85-a33e-b01084c23db5" containerName="telemetry-openstack-openstack-cell1" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.936689 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.942307 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.942307 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.942549 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.942601 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.942683 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:20:23 crc kubenswrapper[4816]: I0216 15:20:23.953865 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-pzqvv"] Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.080003 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.080262 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.080324 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.080429 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.080587 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zn9m\" (UniqueName: \"kubernetes.io/projected/77aba700-c238-4ca0-94cf-f596d763e1a2-kube-api-access-4zn9m\") pod 
\"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.081122 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.183199 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zn9m\" (UniqueName: \"kubernetes.io/projected/77aba700-c238-4ca0-94cf-f596d763e1a2-kube-api-access-4zn9m\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.183408 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.183545 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.183616 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.183672 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.183716 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.187382 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: 
\"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.187449 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.192166 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.192482 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.193117 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.202525 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zn9m\" (UniqueName: \"kubernetes.io/projected/77aba700-c238-4ca0-94cf-f596d763e1a2-kube-api-access-4zn9m\") pod \"neutron-sriov-openstack-openstack-cell1-pzqvv\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.255498 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:20:24 crc kubenswrapper[4816]: I0216 15:20:24.817311 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-pzqvv"] Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.656956 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kjpmn"] Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.659626 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.678011 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kjpmn"] Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.829671 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-utilities\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.829759 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26mbw\" (UniqueName: \"kubernetes.io/projected/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-kube-api-access-26mbw\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.829805 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-catalog-content\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.839937 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" event={"ID":"77aba700-c238-4ca0-94cf-f596d763e1a2","Type":"ContainerStarted","Data":"2e224710a851dfddd284f7adf4c9f3fb3dd21b3435b7225cfb9003a41cc57727"} Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.839992 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" event={"ID":"77aba700-c238-4ca0-94cf-f596d763e1a2","Type":"ContainerStarted","Data":"5ce6e8f260dc531c3320574fcb8ee50143c500438e8bd6474d837fda634b5be2"} Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.871921 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" podStartSLOduration=2.479620105 podStartE2EDuration="2.871863848s" podCreationTimestamp="2026-02-16 15:20:23 +0000 UTC" firstStartedPulling="2026-02-16 15:20:24.820819658 +0000 UTC m=+8224.147533386" lastFinishedPulling="2026-02-16 15:20:25.213063401 +0000 UTC m=+8224.539777129" observedRunningTime="2026-02-16 15:20:25.864736974 +0000 UTC m=+8225.191450712" watchObservedRunningTime="2026-02-16 15:20:25.871863848 +0000 UTC m=+8225.198577576" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.932010 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-utilities\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.932515 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-utilities\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " 
pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.932550 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26mbw\" (UniqueName: \"kubernetes.io/projected/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-kube-api-access-26mbw\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.932719 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-catalog-content\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.933155 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-catalog-content\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.957413 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26mbw\" (UniqueName: \"kubernetes.io/projected/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-kube-api-access-26mbw\") pod \"certified-operators-kjpmn\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:25 crc kubenswrapper[4816]: I0216 15:20:25.983205 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:26 crc kubenswrapper[4816]: I0216 15:20:26.572967 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kjpmn"] Feb 16 15:20:26 crc kubenswrapper[4816]: W0216 15:20:26.575272 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d4e09a4_c0d8_4bf4_b360_fca95c1ccf0a.slice/crio-ace81fa2f6a1549b78d0788385dec6c343f7a89bad1184b1991812d03c83c00a WatchSource:0}: Error finding container ace81fa2f6a1549b78d0788385dec6c343f7a89bad1184b1991812d03c83c00a: Status 404 returned error can't find the container with id ace81fa2f6a1549b78d0788385dec6c343f7a89bad1184b1991812d03c83c00a Feb 16 15:20:26 crc kubenswrapper[4816]: I0216 15:20:26.851772 4816 generic.go:334] "Generic (PLEG): container finished" podID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerID="b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6" exitCode=0 Feb 16 15:20:26 crc kubenswrapper[4816]: I0216 15:20:26.851854 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjpmn" event={"ID":"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a","Type":"ContainerDied","Data":"b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6"} Feb 16 15:20:26 crc kubenswrapper[4816]: I0216 15:20:26.852158 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjpmn" event={"ID":"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a","Type":"ContainerStarted","Data":"ace81fa2f6a1549b78d0788385dec6c343f7a89bad1184b1991812d03c83c00a"} Feb 16 15:20:27 crc kubenswrapper[4816]: I0216 15:20:27.398441 4816 scope.go:117] 
"RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:20:27 crc kubenswrapper[4816]: E0216 15:20:27.399176 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:20:27 crc kubenswrapper[4816]: I0216 15:20:27.863423 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjpmn" event={"ID":"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a","Type":"ContainerStarted","Data":"92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693"} Feb 16 15:20:29 crc kubenswrapper[4816]: I0216 15:20:29.892215 4816 generic.go:334] "Generic (PLEG): container finished" podID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerID="92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693" exitCode=0 Feb 16 15:20:29 crc kubenswrapper[4816]: I0216 15:20:29.892309 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjpmn" event={"ID":"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a","Type":"ContainerDied","Data":"92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693"} Feb 16 15:20:30 crc kubenswrapper[4816]: I0216 15:20:30.904961 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjpmn" event={"ID":"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a","Type":"ContainerStarted","Data":"67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335"} Feb 16 15:20:30 crc kubenswrapper[4816]: I0216 15:20:30.934517 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kjpmn" podStartSLOduration=2.493684714 podStartE2EDuration="5.934485544s" podCreationTimestamp="2026-02-16 15:20:25 +0000 UTC" firstStartedPulling="2026-02-16 15:20:26.854725705 +0000 UTC m=+8226.181439433" lastFinishedPulling="2026-02-16 15:20:30.295526535 +0000 UTC m=+8229.622240263" observedRunningTime="2026-02-16 15:20:30.925023787 +0000 UTC m=+8230.251737525" watchObservedRunningTime="2026-02-16 15:20:30.934485544 +0000 UTC m=+8230.261199272" Feb 16 15:20:35 crc kubenswrapper[4816]: I0216 15:20:35.984299 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:35 crc kubenswrapper[4816]: I0216 15:20:35.986044 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:36 crc kubenswrapper[4816]: I0216 15:20:36.046527 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:37 crc kubenswrapper[4816]: I0216 15:20:37.014701 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:37 crc kubenswrapper[4816]: I0216 15:20:37.073237 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kjpmn"] Feb 16 15:20:38 crc kubenswrapper[4816]: I0216 15:20:38.980769 4816 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-kjpmn" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="registry-server" containerID="cri-o://67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335" gracePeriod=2 Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.546442 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.698123 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26mbw\" (UniqueName: \"kubernetes.io/projected/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-kube-api-access-26mbw\") pod \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.698400 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-catalog-content\") pod \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.698451 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-utilities\") pod \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\" (UID: \"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a\") " Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.699554 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-utilities" (OuterVolumeSpecName: "utilities") pod "8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" (UID: "8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.706933 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-kube-api-access-26mbw" (OuterVolumeSpecName: "kube-api-access-26mbw") pod "8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" (UID: "8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a"). InnerVolumeSpecName "kube-api-access-26mbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.753741 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" (UID: "8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.800749 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26mbw\" (UniqueName: \"kubernetes.io/projected/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-kube-api-access-26mbw\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.800778 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.800788 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.993079 4816 generic.go:334] "Generic (PLEG): container finished" podID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerID="67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335" exitCode=0 Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.993124 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjpmn" event={"ID":"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a","Type":"ContainerDied","Data":"67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335"} Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.993149 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjpmn" event={"ID":"8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a","Type":"ContainerDied","Data":"ace81fa2f6a1549b78d0788385dec6c343f7a89bad1184b1991812d03c83c00a"} Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.993166 4816 scope.go:117] "RemoveContainer" containerID="67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335" Feb 16 15:20:39 crc kubenswrapper[4816]: I0216 15:20:39.994466 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kjpmn" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.054665 4816 scope.go:117] "RemoveContainer" containerID="92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.058192 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kjpmn"] Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.071071 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kjpmn"] Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.080035 4816 scope.go:117] "RemoveContainer" containerID="b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.147184 4816 scope.go:117] "RemoveContainer" containerID="67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335" Feb 16 15:20:40 crc kubenswrapper[4816]: E0216 15:20:40.147878 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335\": container with ID starting with 67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335 not found: ID does not exist" containerID="67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.147946 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335"} err="failed to get container status \"67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335\": rpc error: code = NotFound desc = could not find container \"67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335\": container with ID starting with 67acdb8f94b63ca18d633b454462f7904e16f2c7bc665812080a4f0588b83335 not found: ID does not exist" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.147980 4816 scope.go:117] "RemoveContainer" containerID="92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693" Feb 16 15:20:40 crc kubenswrapper[4816]: E0216 15:20:40.148349 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693\": container with ID starting with 92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693 not found: ID does not exist" containerID="92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.148455 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693"} err="failed to get container status \"92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693\": rpc error: code = NotFound desc = could not find container \"92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693\": container with ID starting with 92873d267c9c003647c8ae1fecbb4012c589518e1473dd68e7b227523ddfa693 not found: ID does not exist" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.148545 4816 scope.go:117] "RemoveContainer" containerID="b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6" Feb 16 15:20:40 crc kubenswrapper[4816]: E0216 15:20:40.148834 4816 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6\": container with ID starting with b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6 not found: ID does not exist" containerID="b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6" Feb 16 15:20:40 crc kubenswrapper[4816]: I0216 15:20:40.148942 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6"} err="failed to get container status \"b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6\": rpc error: code = NotFound desc = could not find container \"b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6\": container with ID starting with b1f0f4bb04c5250d5b6f673f102448794aabc7add725bb3fe3b28d8f6c750ad6 not found: ID does not exist" Feb 16 15:20:41 crc kubenswrapper[4816]: I0216 15:20:41.454566 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" path="/var/lib/kubelet/pods/8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a/volumes" Feb 16 15:20:42 crc kubenswrapper[4816]: I0216 15:20:42.399259 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:20:42 crc kubenswrapper[4816]: E0216 15:20:42.400002 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:20:57 crc kubenswrapper[4816]: I0216 15:20:57.398645 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:20:57 crc kubenswrapper[4816]: E0216 15:20:57.399433 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:21:10 crc kubenswrapper[4816]: I0216 15:21:10.398470 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:21:10 crc kubenswrapper[4816]: E0216 15:21:10.399419 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:21:25 crc kubenswrapper[4816]: I0216 15:21:25.399163 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:21:25 crc kubenswrapper[4816]: E0216 15:21:25.400097 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:21:32 crc kubenswrapper[4816]: I0216 15:21:32.571113 4816 generic.go:334] "Generic (PLEG): container finished" podID="77aba700-c238-4ca0-94cf-f596d763e1a2" containerID="2e224710a851dfddd284f7adf4c9f3fb3dd21b3435b7225cfb9003a41cc57727" exitCode=0 Feb 16 15:21:32 crc kubenswrapper[4816]: I0216 15:21:32.571172 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" event={"ID":"77aba700-c238-4ca0-94cf-f596d763e1a2","Type":"ContainerDied","Data":"2e224710a851dfddd284f7adf4c9f3fb3dd21b3435b7225cfb9003a41cc57727"} Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.045941 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.231945 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-agent-neutron-config-0\") pod \"77aba700-c238-4ca0-94cf-f596d763e1a2\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.232090 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ceph\") pod \"77aba700-c238-4ca0-94cf-f596d763e1a2\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.232167 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-combined-ca-bundle\") pod \"77aba700-c238-4ca0-94cf-f596d763e1a2\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.232221 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ssh-key-openstack-cell1\") pod \"77aba700-c238-4ca0-94cf-f596d763e1a2\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.232246 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-inventory\") pod \"77aba700-c238-4ca0-94cf-f596d763e1a2\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.232304 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zn9m\" (UniqueName: \"kubernetes.io/projected/77aba700-c238-4ca0-94cf-f596d763e1a2-kube-api-access-4zn9m\") pod \"77aba700-c238-4ca0-94cf-f596d763e1a2\" (UID: \"77aba700-c238-4ca0-94cf-f596d763e1a2\") " Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.238244 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77aba700-c238-4ca0-94cf-f596d763e1a2-kube-api-access-4zn9m" 
(OuterVolumeSpecName: "kube-api-access-4zn9m") pod "77aba700-c238-4ca0-94cf-f596d763e1a2" (UID: "77aba700-c238-4ca0-94cf-f596d763e1a2"). InnerVolumeSpecName "kube-api-access-4zn9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.240035 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ceph" (OuterVolumeSpecName: "ceph") pod "77aba700-c238-4ca0-94cf-f596d763e1a2" (UID: "77aba700-c238-4ca0-94cf-f596d763e1a2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.242166 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "77aba700-c238-4ca0-94cf-f596d763e1a2" (UID: "77aba700-c238-4ca0-94cf-f596d763e1a2"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.262977 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-inventory" (OuterVolumeSpecName: "inventory") pod "77aba700-c238-4ca0-94cf-f596d763e1a2" (UID: "77aba700-c238-4ca0-94cf-f596d763e1a2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.272480 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "77aba700-c238-4ca0-94cf-f596d763e1a2" (UID: "77aba700-c238-4ca0-94cf-f596d763e1a2"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.278178 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "77aba700-c238-4ca0-94cf-f596d763e1a2" (UID: "77aba700-c238-4ca0-94cf-f596d763e1a2"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.334905 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.334940 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.334952 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.334962 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zn9m\" (UniqueName: \"kubernetes.io/projected/77aba700-c238-4ca0-94cf-f596d763e1a2-kube-api-access-4zn9m\") on node \"crc\" DevicePath \"\"" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.334972 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.334981 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77aba700-c238-4ca0-94cf-f596d763e1a2-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.595220 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" event={"ID":"77aba700-c238-4ca0-94cf-f596d763e1a2","Type":"ContainerDied","Data":"5ce6e8f260dc531c3320574fcb8ee50143c500438e8bd6474d837fda634b5be2"} Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.595727 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ce6e8f260dc531c3320574fcb8ee50143c500438e8bd6474d837fda634b5be2" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.595592 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-pzqvv" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.764891 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt"] Feb 16 15:21:34 crc kubenswrapper[4816]: E0216 15:21:34.765401 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="extract-utilities" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.765423 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="extract-utilities" Feb 16 15:21:34 crc kubenswrapper[4816]: E0216 15:21:34.765436 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77aba700-c238-4ca0-94cf-f596d763e1a2" containerName="neutron-sriov-openstack-openstack-cell1" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.765443 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="77aba700-c238-4ca0-94cf-f596d763e1a2" containerName="neutron-sriov-openstack-openstack-cell1" Feb 16 15:21:34 crc kubenswrapper[4816]: E0216 15:21:34.765452 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="registry-server" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.765458 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="registry-server" Feb 16 15:21:34 crc kubenswrapper[4816]: E0216 15:21:34.765473 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="extract-content" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.765478 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="extract-content" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.765697 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d4e09a4-c0d8-4bf4-b360-fca95c1ccf0a" containerName="registry-server" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.765719 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="77aba700-c238-4ca0-94cf-f596d763e1a2" containerName="neutron-sriov-openstack-openstack-cell1" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.766506 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.768631 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.768871 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.768927 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.769231 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.769810 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.774543 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt"] Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.895817 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.895914 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kpps\" (UniqueName: \"kubernetes.io/projected/4c98be45-7f99-41f4-93dd-f57aa565492f-kube-api-access-4kpps\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.895967 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.896008 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.896073 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.896384 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.998332 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kpps\" (UniqueName: \"kubernetes.io/projected/4c98be45-7f99-41f4-93dd-f57aa565492f-kube-api-access-4kpps\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.998406 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.998433 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.998466 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.998558 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:34 crc kubenswrapper[4816]: I0216 15:21:34.998607 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.003211 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.004001 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.007179 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.008390 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.022971 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.026169 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kpps\" (UniqueName: \"kubernetes.io/projected/4c98be45-7f99-41f4-93dd-f57aa565492f-kube-api-access-4kpps\") pod \"neutron-dhcp-openstack-openstack-cell1-gvbjt\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.101785 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:21:35 crc kubenswrapper[4816]: I0216 15:21:35.657548 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt"] Feb 16 15:21:36 crc kubenswrapper[4816]: I0216 15:21:36.398792 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:21:36 crc kubenswrapper[4816]: E0216 15:21:36.400316 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:21:36 crc kubenswrapper[4816]: I0216 15:21:36.630177 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" event={"ID":"4c98be45-7f99-41f4-93dd-f57aa565492f","Type":"ContainerStarted","Data":"dd9efbe8ad07c707bb1e28de24fe119a0b6f0ddda256fc451408263bc94d9dbf"} Feb 16 15:21:36 crc kubenswrapper[4816]: I0216 15:21:36.630483 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" event={"ID":"4c98be45-7f99-41f4-93dd-f57aa565492f","Type":"ContainerStarted","Data":"bb5b10788de45d32d5266c2f5a81c09197469023f5efdfef32e5fb22e981e681"} Feb 16 15:21:36 crc kubenswrapper[4816]: I0216 15:21:36.650761 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" podStartSLOduration=2.038757574 podStartE2EDuration="2.650744509s" podCreationTimestamp="2026-02-16 15:21:34 +0000 UTC" firstStartedPulling="2026-02-16 15:21:35.662583348 +0000 UTC m=+8294.989297076" lastFinishedPulling="2026-02-16 15:21:36.274570273 +0000 UTC m=+8295.601284011" observedRunningTime="2026-02-16 15:21:36.648357094 +0000 UTC m=+8295.975070822" watchObservedRunningTime="2026-02-16 15:21:36.650744509 +0000 UTC m=+8295.977458237" Feb 16 15:21:47 crc kubenswrapper[4816]: I0216 15:21:47.399324 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:21:47 crc kubenswrapper[4816]: E0216 15:21:47.400034 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:22:00 crc kubenswrapper[4816]: I0216 15:22:00.399065 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:22:00 crc kubenswrapper[4816]: E0216 15:22:00.399939 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:22:11 crc kubenswrapper[4816]: I0216 15:22:11.409311 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:22:11 crc kubenswrapper[4816]: E0216 15:22:11.410349 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:22:26 crc kubenswrapper[4816]: I0216 15:22:26.398554 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:22:26 crc kubenswrapper[4816]: E0216 15:22:26.399235 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:22:37 crc kubenswrapper[4816]: I0216 15:22:37.399160 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:22:37 crc kubenswrapper[4816]: E0216 15:22:37.399910 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:22:52 crc kubenswrapper[4816]: I0216 15:22:52.400983 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:22:52 crc kubenswrapper[4816]: E0216 15:22:52.401977 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:22:53 crc kubenswrapper[4816]: I0216 15:22:53.434905 4816 generic.go:334] "Generic (PLEG): container finished" podID="4c98be45-7f99-41f4-93dd-f57aa565492f" containerID="dd9efbe8ad07c707bb1e28de24fe119a0b6f0ddda256fc451408263bc94d9dbf" exitCode=0 Feb 16 15:22:53 crc kubenswrapper[4816]: I0216 15:22:53.435235 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" event={"ID":"4c98be45-7f99-41f4-93dd-f57aa565492f","Type":"ContainerDied","Data":"dd9efbe8ad07c707bb1e28de24fe119a0b6f0ddda256fc451408263bc94d9dbf"} Feb 16 15:22:54 crc kubenswrapper[4816]: I0216 15:22:54.893402 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.067307 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ceph\") pod \"4c98be45-7f99-41f4-93dd-f57aa565492f\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.067378 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kpps\" (UniqueName: \"kubernetes.io/projected/4c98be45-7f99-41f4-93dd-f57aa565492f-kube-api-access-4kpps\") pod \"4c98be45-7f99-41f4-93dd-f57aa565492f\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.067497 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ssh-key-openstack-cell1\") pod \"4c98be45-7f99-41f4-93dd-f57aa565492f\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.067599 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-combined-ca-bundle\") pod \"4c98be45-7f99-41f4-93dd-f57aa565492f\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.067634 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-inventory\") pod \"4c98be45-7f99-41f4-93dd-f57aa565492f\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.067689 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-agent-neutron-config-0\") pod \"4c98be45-7f99-41f4-93dd-f57aa565492f\" (UID: \"4c98be45-7f99-41f4-93dd-f57aa565492f\") " Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.086886 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ceph" (OuterVolumeSpecName: "ceph") pod "4c98be45-7f99-41f4-93dd-f57aa565492f" (UID: "4c98be45-7f99-41f4-93dd-f57aa565492f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.087442 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c98be45-7f99-41f4-93dd-f57aa565492f-kube-api-access-4kpps" (OuterVolumeSpecName: "kube-api-access-4kpps") pod "4c98be45-7f99-41f4-93dd-f57aa565492f" (UID: "4c98be45-7f99-41f4-93dd-f57aa565492f"). InnerVolumeSpecName "kube-api-access-4kpps". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.093402 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "4c98be45-7f99-41f4-93dd-f57aa565492f" (UID: "4c98be45-7f99-41f4-93dd-f57aa565492f"). 
InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.113944 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-inventory" (OuterVolumeSpecName: "inventory") pod "4c98be45-7f99-41f4-93dd-f57aa565492f" (UID: "4c98be45-7f99-41f4-93dd-f57aa565492f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.114575 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "4c98be45-7f99-41f4-93dd-f57aa565492f" (UID: "4c98be45-7f99-41f4-93dd-f57aa565492f"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.115226 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "4c98be45-7f99-41f4-93dd-f57aa565492f" (UID: "4c98be45-7f99-41f4-93dd-f57aa565492f"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.170191 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.170226 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.170240 4816 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.170251 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.170260 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kpps\" (UniqueName: \"kubernetes.io/projected/4c98be45-7f99-41f4-93dd-f57aa565492f-kube-api-access-4kpps\") on node \"crc\" DevicePath \"\"" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.170271 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/4c98be45-7f99-41f4-93dd-f57aa565492f-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.455447 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" event={"ID":"4c98be45-7f99-41f4-93dd-f57aa565492f","Type":"ContainerDied","Data":"bb5b10788de45d32d5266c2f5a81c09197469023f5efdfef32e5fb22e981e681"} Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.455815 
4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb5b10788de45d32d5266c2f5a81c09197469023f5efdfef32e5fb22e981e681" Feb 16 15:22:55 crc kubenswrapper[4816]: I0216 15:22:55.455909 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-gvbjt" Feb 16 15:23:04 crc kubenswrapper[4816]: I0216 15:23:04.398745 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:23:04 crc kubenswrapper[4816]: E0216 15:23:04.399504 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:23:17 crc kubenswrapper[4816]: I0216 15:23:17.399385 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:23:17 crc kubenswrapper[4816]: E0216 15:23:17.401464 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:23:18 crc kubenswrapper[4816]: I0216 15:23:18.520072 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 15:23:18 crc kubenswrapper[4816]: I0216 15:23:18.520605 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="329ccf42-5f39-4f99-a3c7-4ddc76208882" containerName="nova-cell0-conductor-conductor" containerID="cri-o://8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a" gracePeriod=30 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.240515 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.241126 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="04537661-4733-45d6-a694-48c9cde1cbb5" containerName="nova-cell1-conductor-conductor" containerID="cri-o://c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" gracePeriod=30 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.397083 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.397533 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-log" containerID="cri-o://b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848" gracePeriod=30 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.397627 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-api" 
containerID="cri-o://bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453" gracePeriod=30 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.470023 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.470439 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a918762a-682a-4191-afeb-8a5b2de9de86" containerName="nova-scheduler-scheduler" containerID="cri-o://f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861" gracePeriod=30 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.551432 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.551875 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-log" containerID="cri-o://2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452" gracePeriod=30 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.552454 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-metadata" containerID="cri-o://d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca" gracePeriod=30 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.574072 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn"] Feb 16 15:23:19 crc kubenswrapper[4816]: E0216 15:23:19.574627 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c98be45-7f99-41f4-93dd-f57aa565492f" containerName="neutron-dhcp-openstack-openstack-cell1" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.574642 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c98be45-7f99-41f4-93dd-f57aa565492f" containerName="neutron-dhcp-openstack-openstack-cell1" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.574967 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c98be45-7f99-41f4-93dd-f57aa565492f" containerName="neutron-dhcp-openstack-openstack-cell1" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.575848 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.582157 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.582253 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.582179 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.582701 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.582854 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.582989 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.583228 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qr5hl" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.609759 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn"] Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695233 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695313 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695345 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695366 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695392 4816 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695409 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnbsg\" (UniqueName: \"kubernetes.io/projected/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-kube-api-access-xnbsg\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695424 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695465 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-3\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695481 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-2\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695545 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695584 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695613 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.695637 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.790315 4816 generic.go:334] "Generic (PLEG): container finished" podID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerID="b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848" exitCode=143 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.790391 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e814f4db-24c6-4fb2-b389-3ab964e8fe40","Type":"ContainerDied","Data":"b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848"} Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.792084 4816 generic.go:334] "Generic (PLEG): container finished" podID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerID="2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452" exitCode=143 Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.792124 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce6e206c-659f-4813-ab40-1cdc6ab9e22d","Type":"ContainerDied","Data":"2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452"} Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.797707 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.797786 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.797830 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.797855 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.797892 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.798803 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnbsg\" (UniqueName: \"kubernetes.io/projected/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-kube-api-access-xnbsg\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.798842 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.798811 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.798905 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-3\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.798930 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-2\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.799043 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.799104 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.799152 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.799193 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.799385 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.806884 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.806947 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.806975 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.807043 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-3\" (UniqueName: 
\"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-3\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.807326 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.807391 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.808211 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.811363 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.817833 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.820888 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-2\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.826607 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnbsg\" (UniqueName: \"kubernetes.io/projected/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-kube-api-access-xnbsg\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:19 crc kubenswrapper[4816]: I0216 15:23:19.937152 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.301205 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.403442 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch6hr\" (UniqueName: \"kubernetes.io/projected/329ccf42-5f39-4f99-a3c7-4ddc76208882-kube-api-access-ch6hr\") pod \"329ccf42-5f39-4f99-a3c7-4ddc76208882\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.412329 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/329ccf42-5f39-4f99-a3c7-4ddc76208882-kube-api-access-ch6hr" (OuterVolumeSpecName: "kube-api-access-ch6hr") pod "329ccf42-5f39-4f99-a3c7-4ddc76208882" (UID: "329ccf42-5f39-4f99-a3c7-4ddc76208882"). InnerVolumeSpecName "kube-api-access-ch6hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.460023 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe is running failed: container process not found" containerID="c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.460459 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe is running failed: container process not found" containerID="c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.460973 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe is running failed: container process not found" containerID="c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.461051 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="04537661-4733-45d6-a694-48c9cde1cbb5" containerName="nova-cell1-conductor-conductor" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.518991 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-config-data\") pod \"329ccf42-5f39-4f99-a3c7-4ddc76208882\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 
15:23:20.519519 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-combined-ca-bundle\") pod \"329ccf42-5f39-4f99-a3c7-4ddc76208882\" (UID: \"329ccf42-5f39-4f99-a3c7-4ddc76208882\") " Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.520138 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch6hr\" (UniqueName: \"kubernetes.io/projected/329ccf42-5f39-4f99-a3c7-4ddc76208882-kube-api-access-ch6hr\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.557842 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-config-data" (OuterVolumeSpecName: "config-data") pod "329ccf42-5f39-4f99-a3c7-4ddc76208882" (UID: "329ccf42-5f39-4f99-a3c7-4ddc76208882"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.567942 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "329ccf42-5f39-4f99-a3c7-4ddc76208882" (UID: "329ccf42-5f39-4f99-a3c7-4ddc76208882"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.623987 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.624061 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329ccf42-5f39-4f99-a3c7-4ddc76208882-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.682129 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.706850 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn"] Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.725574 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-combined-ca-bundle\") pod \"04537661-4733-45d6-a694-48c9cde1cbb5\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.725683 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9qs5\" (UniqueName: \"kubernetes.io/projected/04537661-4733-45d6-a694-48c9cde1cbb5-kube-api-access-h9qs5\") pod \"04537661-4733-45d6-a694-48c9cde1cbb5\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.725791 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-config-data\") pod \"04537661-4733-45d6-a694-48c9cde1cbb5\" (UID: \"04537661-4733-45d6-a694-48c9cde1cbb5\") " Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.735492 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04537661-4733-45d6-a694-48c9cde1cbb5-kube-api-access-h9qs5" (OuterVolumeSpecName: "kube-api-access-h9qs5") pod "04537661-4733-45d6-a694-48c9cde1cbb5" (UID: "04537661-4733-45d6-a694-48c9cde1cbb5"). InnerVolumeSpecName "kube-api-access-h9qs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.760203 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-config-data" (OuterVolumeSpecName: "config-data") pod "04537661-4733-45d6-a694-48c9cde1cbb5" (UID: "04537661-4733-45d6-a694-48c9cde1cbb5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.760505 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04537661-4733-45d6-a694-48c9cde1cbb5" (UID: "04537661-4733-45d6-a694-48c9cde1cbb5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.802230 4816 generic.go:334] "Generic (PLEG): container finished" podID="329ccf42-5f39-4f99-a3c7-4ddc76208882" containerID="8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a" exitCode=0 Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.802285 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"329ccf42-5f39-4f99-a3c7-4ddc76208882","Type":"ContainerDied","Data":"8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a"} Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.802310 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"329ccf42-5f39-4f99-a3c7-4ddc76208882","Type":"ContainerDied","Data":"6985ade96fb4dd2479c965efd5da1e35cf1805f87d236c1ae26cb770221c2d16"} Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.802326 4816 scope.go:117] "RemoveContainer" containerID="8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.802430 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.807273 4816 generic.go:334] "Generic (PLEG): container finished" podID="04537661-4733-45d6-a694-48c9cde1cbb5" containerID="c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" exitCode=0 Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.807311 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.807335 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"04537661-4733-45d6-a694-48c9cde1cbb5","Type":"ContainerDied","Data":"c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe"} Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.807357 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"04537661-4733-45d6-a694-48c9cde1cbb5","Type":"ContainerDied","Data":"d33fcc493407e3e03478d0b824a47f42f06b2bed554a2063feb0d5f5e928314c"} Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.812520 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" event={"ID":"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f","Type":"ContainerStarted","Data":"82cb9cbe8592cba2436cc509fe2e99a55464162a2d8c845c00d9475677044448"} Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.828181 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.828222 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9qs5\" (UniqueName: \"kubernetes.io/projected/04537661-4733-45d6-a694-48c9cde1cbb5-kube-api-access-h9qs5\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.828238 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04537661-4733-45d6-a694-48c9cde1cbb5-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 
15:23:20.857549 4816 scope.go:117] "RemoveContainer" containerID="8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.866306 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.867288 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a\": container with ID starting with 8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a not found: ID does not exist" containerID="8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.867329 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a"} err="failed to get container status \"8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a\": rpc error: code = NotFound desc = could not find container \"8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a\": container with ID starting with 8324ba815a0a526eb3431f403f0cb9b72d6d8744ab6c898ec0f622237fa0fa0a not found: ID does not exist" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.867353 4816 scope.go:117] "RemoveContainer" containerID="c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.886786 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.924927 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.928611 4816 scope.go:117] "RemoveContainer" containerID="c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.929191 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe\": container with ID starting with c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe not found: ID does not exist" containerID="c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.929231 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe"} err="failed to get container status \"c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe\": rpc error: code = NotFound desc = could not find container \"c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe\": container with ID starting with c3a08da8b099d801b9ee3e287cb00361f1b5c432b0a1649c93279df495d4abfe not found: ID does not exist" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.937188 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.948034 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.948526 4816 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="329ccf42-5f39-4f99-a3c7-4ddc76208882" containerName="nova-cell0-conductor-conductor" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.948539 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="329ccf42-5f39-4f99-a3c7-4ddc76208882" containerName="nova-cell0-conductor-conductor" Feb 16 15:23:20 crc kubenswrapper[4816]: E0216 15:23:20.948551 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04537661-4733-45d6-a694-48c9cde1cbb5" containerName="nova-cell1-conductor-conductor" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.948558 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="04537661-4733-45d6-a694-48c9cde1cbb5" containerName="nova-cell1-conductor-conductor" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.948835 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="04537661-4733-45d6-a694-48c9cde1cbb5" containerName="nova-cell1-conductor-conductor" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.948854 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="329ccf42-5f39-4f99-a3c7-4ddc76208882" containerName="nova-cell0-conductor-conductor" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.949682 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.952030 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.960499 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.966823 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.969001 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.979213 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 15:23:20 crc kubenswrapper[4816]: I0216 15:23:20.988124 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.031213 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hztsf\" (UniqueName: \"kubernetes.io/projected/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-kube-api-access-hztsf\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.031484 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.031587 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.031703 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.031882 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.032042 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzwmk\" (UniqueName: \"kubernetes.io/projected/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-kube-api-access-gzwmk\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.134937 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzwmk\" (UniqueName: \"kubernetes.io/projected/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-kube-api-access-gzwmk\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.135387 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hztsf\" (UniqueName: 
\"kubernetes.io/projected/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-kube-api-access-hztsf\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.135520 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.135679 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.135837 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.136030 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.139369 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.139455 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.140109 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.140791 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.152171 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzwmk\" (UniqueName: \"kubernetes.io/projected/ceb6d00c-cca7-4f36-b688-356cc8bc83ab-kube-api-access-gzwmk\") pod \"nova-cell0-conductor-0\" (UID: \"ceb6d00c-cca7-4f36-b688-356cc8bc83ab\") " 
pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.153514 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hztsf\" (UniqueName: \"kubernetes.io/projected/be2b417b-fe2d-4d12-8c42-13fa7587f1fa-kube-api-access-hztsf\") pod \"nova-cell1-conductor-0\" (UID: \"be2b417b-fe2d-4d12-8c42-13fa7587f1fa\") " pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.269442 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.288116 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.421141 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04537661-4733-45d6-a694-48c9cde1cbb5" path="/var/lib/kubelet/pods/04537661-4733-45d6-a694-48c9cde1cbb5/volumes" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.423298 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="329ccf42-5f39-4f99-a3c7-4ddc76208882" path="/var/lib/kubelet/pods/329ccf42-5f39-4f99-a3c7-4ddc76208882/volumes" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.833179 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" event={"ID":"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f","Type":"ContainerStarted","Data":"8133a515aeab19226dee64ffc74d9d3a19ac16b0124719414f9356b397a88b9d"} Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.870215 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" podStartSLOduration=2.430083863 podStartE2EDuration="2.870190626s" podCreationTimestamp="2026-02-16 15:23:19 +0000 UTC" firstStartedPulling="2026-02-16 15:23:20.722197011 +0000 UTC m=+8400.048910739" lastFinishedPulling="2026-02-16 15:23:21.162303774 +0000 UTC m=+8400.489017502" observedRunningTime="2026-02-16 15:23:21.85598917 +0000 UTC m=+8401.182702898" watchObservedRunningTime="2026-02-16 15:23:21.870190626 +0000 UTC m=+8401.196904354" Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.913795 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 16 15:23:21 crc kubenswrapper[4816]: I0216 15:23:21.922833 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 16 15:23:21 crc kubenswrapper[4816]: W0216 15:23:21.927433 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podceb6d00c_cca7_4f36_b688_356cc8bc83ab.slice/crio-ccfb99d43424d70d41061e85d9ddf5238b57a583971c57ced7647d789c0db6db WatchSource:0}: Error finding container ccfb99d43424d70d41061e85d9ddf5238b57a583971c57ced7647d789c0db6db: Status 404 returned error can't find the container with id ccfb99d43424d70d41061e85d9ddf5238b57a583971c57ced7647d789c0db6db Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.847787 4816 generic.go:334] "Generic (PLEG): container finished" podID="a918762a-682a-4191-afeb-8a5b2de9de86" containerID="f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861" exitCode=0 Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.848496 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"a918762a-682a-4191-afeb-8a5b2de9de86","Type":"ContainerDied","Data":"f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861"} Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.849729 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ceb6d00c-cca7-4f36-b688-356cc8bc83ab","Type":"ContainerStarted","Data":"bf8df72468549b0727133afc3c651fa442279e2b39b03cb2a3c5e3abec348fa6"} Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.849755 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ceb6d00c-cca7-4f36-b688-356cc8bc83ab","Type":"ContainerStarted","Data":"ccfb99d43424d70d41061e85d9ddf5238b57a583971c57ced7647d789c0db6db"} Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.851154 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.854271 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"be2b417b-fe2d-4d12-8c42-13fa7587f1fa","Type":"ContainerStarted","Data":"cdab15c862b0ca422aeacd03a3a8117a90fa48f4dcbdadfef9c89c119d0b52ae"} Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.854516 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"be2b417b-fe2d-4d12-8c42-13fa7587f1fa","Type":"ContainerStarted","Data":"e1f7924abc1f73140e0abebce0d4e23544c31ecc06b8a4b7dfafd664c1c45731"} Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.876119 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.87609975 podStartE2EDuration="2.87609975s" podCreationTimestamp="2026-02-16 15:23:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:23:22.873747916 +0000 UTC m=+8402.200461644" watchObservedRunningTime="2026-02-16 15:23:22.87609975 +0000 UTC m=+8402.202813478" Feb 16 15:23:22 crc kubenswrapper[4816]: I0216 15:23:22.895107 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.895088626 podStartE2EDuration="2.895088626s" podCreationTimestamp="2026-02-16 15:23:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:23:22.890494031 +0000 UTC m=+8402.217207759" watchObservedRunningTime="2026-02-16 15:23:22.895088626 +0000 UTC m=+8402.221802354" Feb 16 15:23:23 crc kubenswrapper[4816]: E0216 15:23:23.105445 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861 is running failed: container process not found" containerID="f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 15:23:23 crc kubenswrapper[4816]: E0216 15:23:23.106861 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861 is running failed: container process not found" containerID="f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 15:23:23 crc kubenswrapper[4816]: E0216 15:23:23.107257 4816 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861 is running failed: container process not found" containerID="f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 16 15:23:23 crc kubenswrapper[4816]: E0216 15:23:23.107316 4816 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a918762a-682a-4191-afeb-8a5b2de9de86" containerName="nova-scheduler-scheduler" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.132786 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.82:8775/\": read tcp 10.217.0.2:34208->10.217.1.82:8775: read: connection reset by peer" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.133473 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.82:8775/\": read tcp 10.217.0.2:34202->10.217.1.82:8775: read: connection reset by peer" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.544495 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.566001 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.608393 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjq6w\" (UniqueName: \"kubernetes.io/projected/a918762a-682a-4191-afeb-8a5b2de9de86-kube-api-access-rjq6w\") pod \"a918762a-682a-4191-afeb-8a5b2de9de86\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.608443 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-config-data\") pod \"a918762a-682a-4191-afeb-8a5b2de9de86\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.608503 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-combined-ca-bundle\") pod \"a918762a-682a-4191-afeb-8a5b2de9de86\" (UID: \"a918762a-682a-4191-afeb-8a5b2de9de86\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.608544 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md4v6\" (UniqueName: \"kubernetes.io/projected/e814f4db-24c6-4fb2-b389-3ab964e8fe40-kube-api-access-md4v6\") pod \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.608580 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-config-data\") pod \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.608600 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-combined-ca-bundle\") pod \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.609500 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e814f4db-24c6-4fb2-b389-3ab964e8fe40-logs\") pod \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\" (UID: \"e814f4db-24c6-4fb2-b389-3ab964e8fe40\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.611500 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e814f4db-24c6-4fb2-b389-3ab964e8fe40-logs" (OuterVolumeSpecName: "logs") pod "e814f4db-24c6-4fb2-b389-3ab964e8fe40" (UID: "e814f4db-24c6-4fb2-b389-3ab964e8fe40"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.617420 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a918762a-682a-4191-afeb-8a5b2de9de86-kube-api-access-rjq6w" (OuterVolumeSpecName: "kube-api-access-rjq6w") pod "a918762a-682a-4191-afeb-8a5b2de9de86" (UID: "a918762a-682a-4191-afeb-8a5b2de9de86"). InnerVolumeSpecName "kube-api-access-rjq6w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.623694 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e814f4db-24c6-4fb2-b389-3ab964e8fe40-kube-api-access-md4v6" (OuterVolumeSpecName: "kube-api-access-md4v6") pod "e814f4db-24c6-4fb2-b389-3ab964e8fe40" (UID: "e814f4db-24c6-4fb2-b389-3ab964e8fe40"). InnerVolumeSpecName "kube-api-access-md4v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.655749 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-config-data" (OuterVolumeSpecName: "config-data") pod "a918762a-682a-4191-afeb-8a5b2de9de86" (UID: "a918762a-682a-4191-afeb-8a5b2de9de86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.669705 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e814f4db-24c6-4fb2-b389-3ab964e8fe40" (UID: "e814f4db-24c6-4fb2-b389-3ab964e8fe40"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.691918 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a918762a-682a-4191-afeb-8a5b2de9de86" (UID: "a918762a-682a-4191-afeb-8a5b2de9de86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.695870 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-config-data" (OuterVolumeSpecName: "config-data") pod "e814f4db-24c6-4fb2-b389-3ab964e8fe40" (UID: "e814f4db-24c6-4fb2-b389-3ab964e8fe40"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.724447 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjq6w\" (UniqueName: \"kubernetes.io/projected/a918762a-682a-4191-afeb-8a5b2de9de86-kube-api-access-rjq6w\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.724504 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.724515 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a918762a-682a-4191-afeb-8a5b2de9de86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.724524 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md4v6\" (UniqueName: \"kubernetes.io/projected/e814f4db-24c6-4fb2-b389-3ab964e8fe40-kube-api-access-md4v6\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.724533 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.724746 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e814f4db-24c6-4fb2-b389-3ab964e8fe40-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.724758 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e814f4db-24c6-4fb2-b389-3ab964e8fe40-logs\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.726897 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.828456 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-logs\") pod \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.828679 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-config-data\") pod \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.828744 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-combined-ca-bundle\") pod \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.828939 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56dmz\" (UniqueName: \"kubernetes.io/projected/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-kube-api-access-56dmz\") pod \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\" (UID: \"ce6e206c-659f-4813-ab40-1cdc6ab9e22d\") " Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.829365 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-logs" (OuterVolumeSpecName: "logs") pod "ce6e206c-659f-4813-ab40-1cdc6ab9e22d" (UID: "ce6e206c-659f-4813-ab40-1cdc6ab9e22d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.832561 4816 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-logs\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.854961 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-kube-api-access-56dmz" (OuterVolumeSpecName: "kube-api-access-56dmz") pod "ce6e206c-659f-4813-ab40-1cdc6ab9e22d" (UID: "ce6e206c-659f-4813-ab40-1cdc6ab9e22d"). InnerVolumeSpecName "kube-api-access-56dmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.860357 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-config-data" (OuterVolumeSpecName: "config-data") pod "ce6e206c-659f-4813-ab40-1cdc6ab9e22d" (UID: "ce6e206c-659f-4813-ab40-1cdc6ab9e22d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.870824 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce6e206c-659f-4813-ab40-1cdc6ab9e22d" (UID: "ce6e206c-659f-4813-ab40-1cdc6ab9e22d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.874716 4816 generic.go:334] "Generic (PLEG): container finished" podID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerID="d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca" exitCode=0 Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.874804 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce6e206c-659f-4813-ab40-1cdc6ab9e22d","Type":"ContainerDied","Data":"d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca"} Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.874837 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce6e206c-659f-4813-ab40-1cdc6ab9e22d","Type":"ContainerDied","Data":"e06d9657c90ade4a43602a3cdcfc4584993f7ff494fd76217b78c627fe33e4ed"} Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.874879 4816 scope.go:117] "RemoveContainer" containerID="d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.875081 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.883103 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a918762a-682a-4191-afeb-8a5b2de9de86","Type":"ContainerDied","Data":"523029ffb14834e23b2f1c180ca83aec8abdcfaf61de7eca006d69f5dc143745"} Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.883118 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.889289 4816 generic.go:334] "Generic (PLEG): container finished" podID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerID="bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453" exitCode=0 Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.889401 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e814f4db-24c6-4fb2-b389-3ab964e8fe40","Type":"ContainerDied","Data":"bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453"} Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.889442 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.889460 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e814f4db-24c6-4fb2-b389-3ab964e8fe40","Type":"ContainerDied","Data":"a1f6412ec3e573f7e2ca99eb7e8bba01d918656d0b468d2861e2b310a6410a81"} Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.889932 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.938118 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56dmz\" (UniqueName: \"kubernetes.io/projected/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-kube-api-access-56dmz\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.938147 4816 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-config-data\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:23 crc kubenswrapper[4816]: I0216 15:23:23.938158 4816 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e206c-659f-4813-ab40-1cdc6ab9e22d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.003896 4816 scope.go:117] "RemoveContainer" containerID="2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.014849 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.046812 4816 scope.go:117] "RemoveContainer" containerID="d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.046919 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.048144 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca\": container with ID starting with d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca not found: ID does not exist" containerID="d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.048204 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca"} err="failed to get container status \"d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca\": rpc error: code = NotFound desc = could not find container \"d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca\": container with ID starting with d7804b399da2fa2f8a8d2ba86df031675fec456a1fab6f58c5543a65476900ca not found: ID does not exist" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.048240 4816 scope.go:117] "RemoveContainer" containerID="2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452" Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.048499 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452\": container with ID starting with 
2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452 not found: ID does not exist" containerID="2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.048534 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452"} err="failed to get container status \"2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452\": rpc error: code = NotFound desc = could not find container \"2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452\": container with ID starting with 2a5988613b516314f286cf61e29199ee4c82cd743b5e6ead4a5905b7fe759452 not found: ID does not exist" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.048547 4816 scope.go:117] "RemoveContainer" containerID="f916739b6b3ec7ab4a5b7b4b0b4ebdfe1c34b4e94a2280ab42d05d0151664861" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.062122 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.070035 4816 scope.go:117] "RemoveContainer" containerID="bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.079927 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.091180 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.091792 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-log" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.091818 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-log" Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.091829 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a918762a-682a-4191-afeb-8a5b2de9de86" containerName="nova-scheduler-scheduler" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.091836 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="a918762a-682a-4191-afeb-8a5b2de9de86" containerName="nova-scheduler-scheduler" Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.091861 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-api" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.091868 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-api" Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.091886 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-log" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.091891 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-log" Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.091902 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-metadata" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.091908 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" 
containerName="nova-metadata-metadata" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.092107 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-log" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.092122 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="a918762a-682a-4191-afeb-8a5b2de9de86" containerName="nova-scheduler-scheduler" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.092130 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" containerName="nova-metadata-metadata" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.092141 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-api" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.092152 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" containerName="nova-api-log" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.093260 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.096752 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.103164 4816 scope.go:117] "RemoveContainer" containerID="b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.228329 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.229849 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.234916 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.254933 4816 scope.go:117] "RemoveContainer" containerID="bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453" Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.256178 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453\": container with ID starting with bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453 not found: ID does not exist" containerID="bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.256217 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453"} err="failed to get container status \"bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453\": rpc error: code = NotFound desc = could not find container \"bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453\": container with ID starting with bfa3eb872de6f30468ececfe6f58150d0393897bb57aa3c2b06f1a192a962453 not found: ID does not exist" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.256247 4816 scope.go:117] "RemoveContainer" containerID="b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848" Feb 16 15:23:24 crc kubenswrapper[4816]: E0216 15:23:24.257014 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848\": container with ID starting with b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848 not found: ID does not exist" containerID="b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.257040 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848"} err="failed to get container status \"b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848\": rpc error: code = NotFound desc = could not find container \"b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848\": container with ID starting with b7ffe1384a8857725c406c43572cb3adb3b9bfab26290fadb8dc2d43be6ac848 not found: ID does not exist" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.267857 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.278845 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.288584 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.323817 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.327752 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/28fcfa43-069c-4810-9362-611b963f3fca-config-data\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.327904 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnbsn\" (UniqueName: \"kubernetes.io/projected/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-kube-api-access-pnbsn\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.327948 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvt4k\" (UniqueName: \"kubernetes.io/projected/28fcfa43-069c-4810-9362-611b963f3fca-kube-api-access-mvt4k\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.328024 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28fcfa43-069c-4810-9362-611b963f3fca-logs\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.328504 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-config-data\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.329029 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fcfa43-069c-4810-9362-611b963f3fca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.329092 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.337708 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.339843 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.342836 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.350862 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.439971 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d4b4085-0c30-40ad-9941-241c956955b7-logs\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440029 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdn4r\" (UniqueName: \"kubernetes.io/projected/5d4b4085-0c30-40ad-9941-241c956955b7-kube-api-access-vdn4r\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440059 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fcfa43-069c-4810-9362-611b963f3fca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440078 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440207 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4b4085-0c30-40ad-9941-241c956955b7-config-data\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440423 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28fcfa43-069c-4810-9362-611b963f3fca-config-data\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440539 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnbsn\" (UniqueName: \"kubernetes.io/projected/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-kube-api-access-pnbsn\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440567 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvt4k\" (UniqueName: \"kubernetes.io/projected/28fcfa43-069c-4810-9362-611b963f3fca-kube-api-access-mvt4k\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440631 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/28fcfa43-069c-4810-9362-611b963f3fca-logs\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440810 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-config-data\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.440834 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4b4085-0c30-40ad-9941-241c956955b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.441548 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28fcfa43-069c-4810-9362-611b963f3fca-logs\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.444566 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-config-data\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.444912 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.448334 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fcfa43-069c-4810-9362-611b963f3fca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.451506 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28fcfa43-069c-4810-9362-611b963f3fca-config-data\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.458077 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvt4k\" (UniqueName: \"kubernetes.io/projected/28fcfa43-069c-4810-9362-611b963f3fca-kube-api-access-mvt4k\") pod \"nova-api-0\" (UID: \"28fcfa43-069c-4810-9362-611b963f3fca\") " pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.458243 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnbsn\" (UniqueName: \"kubernetes.io/projected/5f7413d4-1b7b-484f-9b5d-a615a36bc1a2-kube-api-access-pnbsn\") pod \"nova-scheduler-0\" (UID: \"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2\") " pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.520249 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.542958 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4b4085-0c30-40ad-9941-241c956955b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.543032 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d4b4085-0c30-40ad-9941-241c956955b7-logs\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.543055 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdn4r\" (UniqueName: \"kubernetes.io/projected/5d4b4085-0c30-40ad-9941-241c956955b7-kube-api-access-vdn4r\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.543083 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4b4085-0c30-40ad-9941-241c956955b7-config-data\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.547032 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d4b4085-0c30-40ad-9941-241c956955b7-config-data\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.550630 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d4b4085-0c30-40ad-9941-241c956955b7-logs\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.552294 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d4b4085-0c30-40ad-9941-241c956955b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.563317 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.577371 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdn4r\" (UniqueName: \"kubernetes.io/projected/5d4b4085-0c30-40ad-9941-241c956955b7-kube-api-access-vdn4r\") pod \"nova-metadata-0\" (UID: \"5d4b4085-0c30-40ad-9941-241c956955b7\") " pod="openstack/nova-metadata-0" Feb 16 15:23:24 crc kubenswrapper[4816]: I0216 15:23:24.672985 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.051496 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.135473 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 16 15:23:25 crc kubenswrapper[4816]: W0216 15:23:25.149520 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f7413d4_1b7b_484f_9b5d_a615a36bc1a2.slice/crio-e13d09bd223382d6280d6e4bcd81277022af0ae8260670851e468dd8b975a62c WatchSource:0}: Error finding container e13d09bd223382d6280d6e4bcd81277022af0ae8260670851e468dd8b975a62c: Status 404 returned error can't find the container with id e13d09bd223382d6280d6e4bcd81277022af0ae8260670851e468dd8b975a62c Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.225595 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 16 15:23:25 crc kubenswrapper[4816]: W0216 15:23:25.236931 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d4b4085_0c30_40ad_9941_241c956955b7.slice/crio-7a6b01e076cda0e88830d791bbf304e508331032f6fdf2898b7f0cb763ba49b7 WatchSource:0}: Error finding container 7a6b01e076cda0e88830d791bbf304e508331032f6fdf2898b7f0cb763ba49b7: Status 404 returned error can't find the container with id 7a6b01e076cda0e88830d791bbf304e508331032f6fdf2898b7f0cb763ba49b7 Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.418957 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a918762a-682a-4191-afeb-8a5b2de9de86" path="/var/lib/kubelet/pods/a918762a-682a-4191-afeb-8a5b2de9de86/volumes" Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.419596 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce6e206c-659f-4813-ab40-1cdc6ab9e22d" path="/var/lib/kubelet/pods/ce6e206c-659f-4813-ab40-1cdc6ab9e22d/volumes" Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.420276 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e814f4db-24c6-4fb2-b389-3ab964e8fe40" path="/var/lib/kubelet/pods/e814f4db-24c6-4fb2-b389-3ab964e8fe40/volumes" Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.921394 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2","Type":"ContainerStarted","Data":"b40f38627e4d9b3a7c851007b0ebbaf67322e8aac6e608c2dfb504e90c14c59e"} Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.921888 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5f7413d4-1b7b-484f-9b5d-a615a36bc1a2","Type":"ContainerStarted","Data":"e13d09bd223382d6280d6e4bcd81277022af0ae8260670851e468dd8b975a62c"} Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.925603 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d4b4085-0c30-40ad-9941-241c956955b7","Type":"ContainerStarted","Data":"d0e63325bced11bde3962f2c5a51b82c990822ed838fe3e201775038aeb87d15"} Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.925679 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d4b4085-0c30-40ad-9941-241c956955b7","Type":"ContainerStarted","Data":"3d27d23eed370b2ad31d2a4826f323e20f6041703cc81514b3cab05cad5580d1"} Feb 16 
15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.925694 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5d4b4085-0c30-40ad-9941-241c956955b7","Type":"ContainerStarted","Data":"7a6b01e076cda0e88830d791bbf304e508331032f6fdf2898b7f0cb763ba49b7"} Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.932604 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"28fcfa43-069c-4810-9362-611b963f3fca","Type":"ContainerStarted","Data":"ad388ee3ce68b69bc989b5cb55488df7037efb4ca187effb2ca6c4b7b4191ae6"} Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.932646 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"28fcfa43-069c-4810-9362-611b963f3fca","Type":"ContainerStarted","Data":"df35131b461877987b8026b7828836654b48668a6db1d4861f4f715574ba1473"} Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.932673 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"28fcfa43-069c-4810-9362-611b963f3fca","Type":"ContainerStarted","Data":"c8ad2868d5e5727111977be33ab1cd2fd32bba6316b18b979f20973ef80463cc"} Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.950110 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.950072469 podStartE2EDuration="1.950072469s" podCreationTimestamp="2026-02-16 15:23:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:23:25.942626436 +0000 UTC m=+8405.269340164" watchObservedRunningTime="2026-02-16 15:23:25.950072469 +0000 UTC m=+8405.276786197" Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.962186 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.962162287 podStartE2EDuration="1.962162287s" podCreationTimestamp="2026-02-16 15:23:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:23:25.958045746 +0000 UTC m=+8405.284759484" watchObservedRunningTime="2026-02-16 15:23:25.962162287 +0000 UTC m=+8405.288876015" Feb 16 15:23:25 crc kubenswrapper[4816]: I0216 15:23:25.984334 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.9843123390000001 podStartE2EDuration="1.984312339s" podCreationTimestamp="2026-02-16 15:23:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:23:25.982319815 +0000 UTC m=+8405.309033543" watchObservedRunningTime="2026-02-16 15:23:25.984312339 +0000 UTC m=+8405.311026067" Feb 16 15:23:29 crc kubenswrapper[4816]: I0216 15:23:29.564368 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 16 15:23:29 crc kubenswrapper[4816]: I0216 15:23:29.674647 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 15:23:29 crc kubenswrapper[4816]: I0216 15:23:29.675034 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 16 15:23:31 crc kubenswrapper[4816]: I0216 15:23:31.305177 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 16 15:23:31 crc 
kubenswrapper[4816]: I0216 15:23:31.326973 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 16 15:23:32 crc kubenswrapper[4816]: I0216 15:23:32.399215 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:23:32 crc kubenswrapper[4816]: E0216 15:23:32.400020 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:23:34 crc kubenswrapper[4816]: I0216 15:23:34.520802 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 15:23:34 crc kubenswrapper[4816]: I0216 15:23:34.521091 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 16 15:23:34 crc kubenswrapper[4816]: I0216 15:23:34.564599 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 16 15:23:34 crc kubenswrapper[4816]: I0216 15:23:34.592773 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 16 15:23:34 crc kubenswrapper[4816]: I0216 15:23:34.674909 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 16 15:23:34 crc kubenswrapper[4816]: I0216 15:23:34.674969 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 16 15:23:35 crc kubenswrapper[4816]: I0216 15:23:35.064972 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 16 15:23:35 crc kubenswrapper[4816]: I0216 15:23:35.603907 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="28fcfa43-069c-4810-9362-611b963f3fca" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 15:23:35 crc kubenswrapper[4816]: I0216 15:23:35.603957 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="28fcfa43-069c-4810-9362-611b963f3fca" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 15:23:35 crc kubenswrapper[4816]: I0216 15:23:35.756890 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5d4b4085-0c30-40ad-9941-241c956955b7" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.189:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 15:23:35 crc kubenswrapper[4816]: I0216 15:23:35.756935 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5d4b4085-0c30-40ad-9941-241c956955b7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.189:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.523959 4816 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.524417 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.525339 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.525351 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.528296 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.530076 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.679839 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.681318 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 16 15:23:44 crc kubenswrapper[4816]: I0216 15:23:44.682894 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 15:23:45 crc kubenswrapper[4816]: I0216 15:23:45.125342 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 16 15:23:46 crc kubenswrapper[4816]: I0216 15:23:46.399642 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:23:46 crc kubenswrapper[4816]: E0216 15:23:46.400394 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:23:59 crc kubenswrapper[4816]: I0216 15:23:59.398781 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:23:59 crc kubenswrapper[4816]: E0216 15:23:59.399463 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:24:14 crc kubenswrapper[4816]: I0216 15:24:14.399396 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:24:14 crc kubenswrapper[4816]: E0216 15:24:14.400231 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.328433 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x8jss"] Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.332465 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.346384 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x8jss"] Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.429881 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-utilities\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.430012 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vvtl\" (UniqueName: \"kubernetes.io/projected/78410791-525e-402a-9308-20dc3288c16a-kube-api-access-6vvtl\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.430093 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-catalog-content\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.532213 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-catalog-content\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.532344 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-utilities\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.532467 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vvtl\" (UniqueName: \"kubernetes.io/projected/78410791-525e-402a-9308-20dc3288c16a-kube-api-access-6vvtl\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.532858 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-catalog-content\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.532883 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-utilities\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.552335 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vvtl\" (UniqueName: \"kubernetes.io/projected/78410791-525e-402a-9308-20dc3288c16a-kube-api-access-6vvtl\") pod \"community-operators-x8jss\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:24 crc kubenswrapper[4816]: I0216 15:24:24.668788 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:25 crc kubenswrapper[4816]: I0216 15:24:25.204027 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x8jss"] Feb 16 15:24:25 crc kubenswrapper[4816]: I0216 15:24:25.582671 4816 generic.go:334] "Generic (PLEG): container finished" podID="78410791-525e-402a-9308-20dc3288c16a" containerID="a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e" exitCode=0 Feb 16 15:24:25 crc kubenswrapper[4816]: I0216 15:24:25.582774 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8jss" event={"ID":"78410791-525e-402a-9308-20dc3288c16a","Type":"ContainerDied","Data":"a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e"} Feb 16 15:24:25 crc kubenswrapper[4816]: I0216 15:24:25.582975 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8jss" event={"ID":"78410791-525e-402a-9308-20dc3288c16a","Type":"ContainerStarted","Data":"8db9e2a42e999878fb552b5e1fa9c8168ac529fe678d992a7b1ae4e3ae3c8a8e"} Feb 16 15:24:26 crc kubenswrapper[4816]: I0216 15:24:26.598169 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8jss" event={"ID":"78410791-525e-402a-9308-20dc3288c16a","Type":"ContainerStarted","Data":"9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9"} Feb 16 15:24:27 crc kubenswrapper[4816]: I0216 15:24:27.613302 4816 generic.go:334] "Generic (PLEG): container finished" podID="78410791-525e-402a-9308-20dc3288c16a" containerID="9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9" exitCode=0 Feb 16 15:24:27 crc kubenswrapper[4816]: I0216 15:24:27.613638 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8jss" event={"ID":"78410791-525e-402a-9308-20dc3288c16a","Type":"ContainerDied","Data":"9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9"} Feb 16 15:24:27 crc kubenswrapper[4816]: E0216 15:24:27.762280 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78410791_525e_402a_9308_20dc3288c16a.slice/crio-9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78410791_525e_402a_9308_20dc3288c16a.slice/crio-conmon-9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9.scope\": RecentStats: unable to find data in memory cache]" Feb 16 15:24:28 crc kubenswrapper[4816]: I0216 15:24:28.398915 4816 scope.go:117] 
"RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:24:28 crc kubenswrapper[4816]: E0216 15:24:28.399393 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:24:28 crc kubenswrapper[4816]: I0216 15:24:28.624436 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8jss" event={"ID":"78410791-525e-402a-9308-20dc3288c16a","Type":"ContainerStarted","Data":"b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96"} Feb 16 15:24:28 crc kubenswrapper[4816]: I0216 15:24:28.660950 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x8jss" podStartSLOduration=2.26720127 podStartE2EDuration="4.660926319s" podCreationTimestamp="2026-02-16 15:24:24 +0000 UTC" firstStartedPulling="2026-02-16 15:24:25.584370459 +0000 UTC m=+8464.911084207" lastFinishedPulling="2026-02-16 15:24:27.978095528 +0000 UTC m=+8467.304809256" observedRunningTime="2026-02-16 15:24:28.645879299 +0000 UTC m=+8467.972593037" watchObservedRunningTime="2026-02-16 15:24:28.660926319 +0000 UTC m=+8467.987640057" Feb 16 15:24:34 crc kubenswrapper[4816]: I0216 15:24:34.669329 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:34 crc kubenswrapper[4816]: I0216 15:24:34.669918 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:34 crc kubenswrapper[4816]: I0216 15:24:34.719842 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:35 crc kubenswrapper[4816]: I0216 15:24:35.755297 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:35 crc kubenswrapper[4816]: I0216 15:24:35.829145 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x8jss"] Feb 16 15:24:37 crc kubenswrapper[4816]: I0216 15:24:37.719484 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x8jss" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="registry-server" containerID="cri-o://b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96" gracePeriod=2 Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.246195 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.350037 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vvtl\" (UniqueName: \"kubernetes.io/projected/78410791-525e-402a-9308-20dc3288c16a-kube-api-access-6vvtl\") pod \"78410791-525e-402a-9308-20dc3288c16a\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.350203 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-utilities\") pod \"78410791-525e-402a-9308-20dc3288c16a\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.350407 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-catalog-content\") pod \"78410791-525e-402a-9308-20dc3288c16a\" (UID: \"78410791-525e-402a-9308-20dc3288c16a\") " Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.351414 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-utilities" (OuterVolumeSpecName: "utilities") pod "78410791-525e-402a-9308-20dc3288c16a" (UID: "78410791-525e-402a-9308-20dc3288c16a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.364963 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78410791-525e-402a-9308-20dc3288c16a-kube-api-access-6vvtl" (OuterVolumeSpecName: "kube-api-access-6vvtl") pod "78410791-525e-402a-9308-20dc3288c16a" (UID: "78410791-525e-402a-9308-20dc3288c16a"). InnerVolumeSpecName "kube-api-access-6vvtl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.408667 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78410791-525e-402a-9308-20dc3288c16a" (UID: "78410791-525e-402a-9308-20dc3288c16a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.453692 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vvtl\" (UniqueName: \"kubernetes.io/projected/78410791-525e-402a-9308-20dc3288c16a-kube-api-access-6vvtl\") on node \"crc\" DevicePath \"\"" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.453724 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.453735 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78410791-525e-402a-9308-20dc3288c16a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.730109 4816 generic.go:334] "Generic (PLEG): container finished" podID="78410791-525e-402a-9308-20dc3288c16a" containerID="b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96" exitCode=0 Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.730185 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8jss" event={"ID":"78410791-525e-402a-9308-20dc3288c16a","Type":"ContainerDied","Data":"b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96"} Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.730254 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8jss" event={"ID":"78410791-525e-402a-9308-20dc3288c16a","Type":"ContainerDied","Data":"8db9e2a42e999878fb552b5e1fa9c8168ac529fe678d992a7b1ae4e3ae3c8a8e"} Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.730280 4816 scope.go:117] "RemoveContainer" containerID="b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.731440 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x8jss" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.762006 4816 scope.go:117] "RemoveContainer" containerID="9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.775618 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x8jss"] Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.786372 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x8jss"] Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.795194 4816 scope.go:117] "RemoveContainer" containerID="a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.845747 4816 scope.go:117] "RemoveContainer" containerID="b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96" Feb 16 15:24:38 crc kubenswrapper[4816]: E0216 15:24:38.846151 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96\": container with ID starting with b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96 not found: ID does not exist" containerID="b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.846191 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96"} err="failed to get container status \"b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96\": rpc error: code = NotFound desc = could not find container \"b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96\": container with ID starting with b547ff567e617edcddbd9a5473b088133bd33dd16133f1a7401c14988171cb96 not found: ID does not exist" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.846215 4816 scope.go:117] "RemoveContainer" containerID="9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9" Feb 16 15:24:38 crc kubenswrapper[4816]: E0216 15:24:38.846416 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9\": container with ID starting with 9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9 not found: ID does not exist" containerID="9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.846444 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9"} err="failed to get container status \"9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9\": rpc error: code = NotFound desc = could not find container \"9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9\": container with ID starting with 9dfe5569af6cbc30a4b2f97f970bb6645fd9939fc004391e091c22732ac7b8d9 not found: ID does not exist" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.846461 4816 scope.go:117] "RemoveContainer" containerID="a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e" Feb 16 15:24:38 crc kubenswrapper[4816]: E0216 15:24:38.846665 4816 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e\": container with ID starting with a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e not found: ID does not exist" containerID="a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e" Feb 16 15:24:38 crc kubenswrapper[4816]: I0216 15:24:38.846690 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e"} err="failed to get container status \"a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e\": rpc error: code = NotFound desc = could not find container \"a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e\": container with ID starting with a3f367a2bf9fc72427129eb53d781071327b0da5fce1119de91bf940b9bd030e not found: ID does not exist" Feb 16 15:24:39 crc kubenswrapper[4816]: I0216 15:24:39.413370 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78410791-525e-402a-9308-20dc3288c16a" path="/var/lib/kubelet/pods/78410791-525e-402a-9308-20dc3288c16a/volumes" Feb 16 15:24:41 crc kubenswrapper[4816]: I0216 15:24:41.408098 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:24:41 crc kubenswrapper[4816]: I0216 15:24:41.768892 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"577ce560bb82351791a6fc74b7dcadb020b11578ab141cbc58f79d5f440d65b3"} Feb 16 15:27:06 crc kubenswrapper[4816]: I0216 15:27:06.940721 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:27:06 crc kubenswrapper[4816]: I0216 15:27:06.941386 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:27:12 crc kubenswrapper[4816]: E0216 15:27:12.581950 4816 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod600c3f65_ad0a_41fe_9fe8_8cc2870bdf9f.slice/crio-8133a515aeab19226dee64ffc74d9d3a19ac16b0124719414f9356b397a88b9d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod600c3f65_ad0a_41fe_9fe8_8cc2870bdf9f.slice/crio-conmon-8133a515aeab19226dee64ffc74d9d3a19ac16b0124719414f9356b397a88b9d.scope\": RecentStats: unable to find data in memory cache]" Feb 16 15:27:12 crc kubenswrapper[4816]: I0216 15:27:12.648317 4816 generic.go:334] "Generic (PLEG): container finished" podID="600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" containerID="8133a515aeab19226dee64ffc74d9d3a19ac16b0124719414f9356b397a88b9d" exitCode=0 Feb 16 15:27:12 crc kubenswrapper[4816]: I0216 15:27:12.648686 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" event={"ID":"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f","Type":"ContainerDied","Data":"8133a515aeab19226dee64ffc74d9d3a19ac16b0124719414f9356b397a88b9d"} Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.141017 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.278847 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-0\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.278950 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-1\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279058 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnbsg\" (UniqueName: \"kubernetes.io/projected/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-kube-api-access-xnbsg\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279080 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-2\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279101 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-combined-ca-bundle\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279155 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-1\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279203 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-0\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279227 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ssh-key-openstack-cell1\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279245 4816 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-1\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279261 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-3\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279303 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ceph\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279352 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-inventory\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.279414 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-0\") pod \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\" (UID: \"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f\") " Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.288064 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.299611 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-kube-api-access-xnbsg" (OuterVolumeSpecName: "kube-api-access-xnbsg") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "kube-api-access-xnbsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.303064 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ceph" (OuterVolumeSpecName: "ceph") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.309554 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-cells-global-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.320262 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-2" (OuterVolumeSpecName: "nova-cell1-compute-config-2") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-cell1-compute-config-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.323061 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.327379 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-inventory" (OuterVolumeSpecName: "inventory") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.331437 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.332173 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-3" (OuterVolumeSpecName: "nova-cell1-compute-config-3") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-cell1-compute-config-3". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.334148 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.335468 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.337334 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.341196 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" (UID: "600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382721 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382754 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382764 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnbsg\" (UniqueName: \"kubernetes.io/projected/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-kube-api-access-xnbsg\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382774 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-2\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382783 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382794 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382803 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382811 4816 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382819 4816 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382829 4816 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-cell1-compute-config-3\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382838 4816 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-ceph\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382849 4816 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-inventory\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.382857 4816 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.670580 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" event={"ID":"600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f","Type":"ContainerDied","Data":"82cb9cbe8592cba2436cc509fe2e99a55464162a2d8c845c00d9475677044448"} Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.670938 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82cb9cbe8592cba2436cc509fe2e99a55464162a2d8c845c00d9475677044448" Feb 16 15:27:14 crc kubenswrapper[4816]: I0216 15:27:14.670625 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn" Feb 16 15:27:36 crc kubenswrapper[4816]: I0216 15:27:36.941187 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:27:36 crc kubenswrapper[4816]: I0216 15:27:36.941741 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:28:06 crc kubenswrapper[4816]: I0216 15:28:06.941258 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:28:06 crc kubenswrapper[4816]: I0216 15:28:06.942744 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:28:06 crc kubenswrapper[4816]: I0216 15:28:06.942867 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 15:28:06 crc kubenswrapper[4816]: I0216 15:28:06.943794 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"577ce560bb82351791a6fc74b7dcadb020b11578ab141cbc58f79d5f440d65b3"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 15:28:06 crc kubenswrapper[4816]: I0216 15:28:06.943966 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://577ce560bb82351791a6fc74b7dcadb020b11578ab141cbc58f79d5f440d65b3" gracePeriod=600 Feb 16 15:28:07 crc kubenswrapper[4816]: I0216 15:28:07.263209 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="577ce560bb82351791a6fc74b7dcadb020b11578ab141cbc58f79d5f440d65b3" exitCode=0 Feb 16 15:28:07 crc kubenswrapper[4816]: I0216 15:28:07.263256 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"577ce560bb82351791a6fc74b7dcadb020b11578ab141cbc58f79d5f440d65b3"} Feb 16 15:28:07 crc kubenswrapper[4816]: I0216 15:28:07.263319 4816 scope.go:117] "RemoveContainer" containerID="4c9940b52568a3575954b9706452745916befc634baabae87dfe74cc4934c8e3" Feb 16 15:28:08 crc kubenswrapper[4816]: I0216 15:28:08.275096 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64"} Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.158883 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6"] Feb 16 15:30:00 crc kubenswrapper[4816]: E0216 15:30:00.160006 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="registry-server" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.160032 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="registry-server" Feb 16 15:30:00 crc kubenswrapper[4816]: E0216 15:30:00.160063 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="extract-utilities" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.160069 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="extract-utilities" Feb 16 15:30:00 crc kubenswrapper[4816]: E0216 15:30:00.160090 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="extract-content" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.160096 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="extract-content" Feb 16 15:30:00 crc kubenswrapper[4816]: E0216 15:30:00.160111 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.160121 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.160405 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="78410791-525e-402a-9308-20dc3288c16a" containerName="registry-server" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.160433 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.161377 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.163857 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.163883 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.174266 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6"] Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.270084 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89a18063-aa20-421b-8605-e4dc95d035c4-config-volume\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.270716 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89a18063-aa20-421b-8605-e4dc95d035c4-secret-volume\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.270823 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz485\" (UniqueName: \"kubernetes.io/projected/89a18063-aa20-421b-8605-e4dc95d035c4-kube-api-access-tz485\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.373889 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89a18063-aa20-421b-8605-e4dc95d035c4-config-volume\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.373987 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89a18063-aa20-421b-8605-e4dc95d035c4-secret-volume\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.374080 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz485\" (UniqueName: \"kubernetes.io/projected/89a18063-aa20-421b-8605-e4dc95d035c4-kube-api-access-tz485\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.374879 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89a18063-aa20-421b-8605-e4dc95d035c4-config-volume\") pod 
\"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.385303 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89a18063-aa20-421b-8605-e4dc95d035c4-secret-volume\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.393987 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz485\" (UniqueName: \"kubernetes.io/projected/89a18063-aa20-421b-8605-e4dc95d035c4-kube-api-access-tz485\") pod \"collect-profiles-29520930-lfgk6\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.491604 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:00 crc kubenswrapper[4816]: I0216 15:30:00.969104 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6"] Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.717870 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r29rt"] Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.720629 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.736981 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r29rt"] Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.806090 4816 generic.go:334] "Generic (PLEG): container finished" podID="89a18063-aa20-421b-8605-e4dc95d035c4" containerID="b6968a4fc1318b5bb51c4fe28dfe40f531e20bd58d34047acb369b54c70f06a1" exitCode=0 Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.806143 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" event={"ID":"89a18063-aa20-421b-8605-e4dc95d035c4","Type":"ContainerDied","Data":"b6968a4fc1318b5bb51c4fe28dfe40f531e20bd58d34047acb369b54c70f06a1"} Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.806172 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" event={"ID":"89a18063-aa20-421b-8605-e4dc95d035c4","Type":"ContainerStarted","Data":"831b4e3752c689994b983934d5b17a33d1cd612abe0832def69f382bdd22048c"} Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.913366 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtd4r\" (UniqueName: \"kubernetes.io/projected/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-kube-api-access-wtd4r\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.913410 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-catalog-content\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:01 crc kubenswrapper[4816]: I0216 15:30:01.913431 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-utilities\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.015465 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtd4r\" (UniqueName: \"kubernetes.io/projected/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-kube-api-access-wtd4r\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.015510 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-catalog-content\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.015537 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-utilities\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.016359 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-utilities\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.016527 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-catalog-content\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.034484 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtd4r\" (UniqueName: \"kubernetes.io/projected/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-kube-api-access-wtd4r\") pod \"redhat-operators-r29rt\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.058037 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.613471 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r29rt"] Feb 16 15:30:02 crc kubenswrapper[4816]: I0216 15:30:02.820001 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r29rt" event={"ID":"78cb5577-fa54-43cf-a105-1b2bd7bf53b9","Type":"ContainerStarted","Data":"e154930a80a49f771d42bf1d8de144739e2560786eb199bd8267af2611a9a7f1"} Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.170236 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.170528 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="597a1a8c-07ae-48f5-a305-9db9b431ebf8" containerName="adoption" containerID="cri-o://9f63bd45b1595ea6623a86e08a4a2096b4004637fea2dd24f46bf1ba93d76494" gracePeriod=30 Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.227722 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.347541 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89a18063-aa20-421b-8605-e4dc95d035c4-secret-volume\") pod \"89a18063-aa20-421b-8605-e4dc95d035c4\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.347582 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89a18063-aa20-421b-8605-e4dc95d035c4-config-volume\") pod \"89a18063-aa20-421b-8605-e4dc95d035c4\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.347749 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz485\" (UniqueName: \"kubernetes.io/projected/89a18063-aa20-421b-8605-e4dc95d035c4-kube-api-access-tz485\") pod \"89a18063-aa20-421b-8605-e4dc95d035c4\" (UID: \"89a18063-aa20-421b-8605-e4dc95d035c4\") " Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.349517 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89a18063-aa20-421b-8605-e4dc95d035c4-config-volume" (OuterVolumeSpecName: "config-volume") pod "89a18063-aa20-421b-8605-e4dc95d035c4" (UID: "89a18063-aa20-421b-8605-e4dc95d035c4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.357971 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89a18063-aa20-421b-8605-e4dc95d035c4-kube-api-access-tz485" (OuterVolumeSpecName: "kube-api-access-tz485") pod "89a18063-aa20-421b-8605-e4dc95d035c4" (UID: "89a18063-aa20-421b-8605-e4dc95d035c4"). InnerVolumeSpecName "kube-api-access-tz485". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.367670 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89a18063-aa20-421b-8605-e4dc95d035c4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "89a18063-aa20-421b-8605-e4dc95d035c4" (UID: "89a18063-aa20-421b-8605-e4dc95d035c4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.451758 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz485\" (UniqueName: \"kubernetes.io/projected/89a18063-aa20-421b-8605-e4dc95d035c4-kube-api-access-tz485\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.451789 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89a18063-aa20-421b-8605-e4dc95d035c4-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.451801 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89a18063-aa20-421b-8605-e4dc95d035c4-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.830023 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" event={"ID":"89a18063-aa20-421b-8605-e4dc95d035c4","Type":"ContainerDied","Data":"831b4e3752c689994b983934d5b17a33d1cd612abe0832def69f382bdd22048c"} Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.830084 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="831b4e3752c689994b983934d5b17a33d1cd612abe0832def69f382bdd22048c" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.830046 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520930-lfgk6" Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.832202 4816 generic.go:334] "Generic (PLEG): container finished" podID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerID="99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43" exitCode=0 Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.832246 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r29rt" event={"ID":"78cb5577-fa54-43cf-a105-1b2bd7bf53b9","Type":"ContainerDied","Data":"99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43"} Feb 16 15:30:03 crc kubenswrapper[4816]: I0216 15:30:03.834793 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 15:30:04 crc kubenswrapper[4816]: I0216 15:30:04.330420 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"] Feb 16 15:30:04 crc kubenswrapper[4816]: I0216 15:30:04.342200 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520885-d6ksr"] Feb 16 15:30:05 crc kubenswrapper[4816]: I0216 15:30:05.410989 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4805de8d-a273-4606-9758-3640f20946d5" path="/var/lib/kubelet/pods/4805de8d-a273-4606-9758-3640f20946d5/volumes" Feb 16 15:30:05 crc kubenswrapper[4816]: I0216 15:30:05.860976 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r29rt" event={"ID":"78cb5577-fa54-43cf-a105-1b2bd7bf53b9","Type":"ContainerStarted","Data":"36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35"} Feb 16 15:30:08 crc kubenswrapper[4816]: I0216 15:30:08.912408 4816 generic.go:334] "Generic (PLEG): container finished" podID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerID="36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35" exitCode=0 Feb 16 15:30:08 crc kubenswrapper[4816]: I0216 15:30:08.912534 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r29rt" event={"ID":"78cb5577-fa54-43cf-a105-1b2bd7bf53b9","Type":"ContainerDied","Data":"36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35"} Feb 16 15:30:09 crc kubenswrapper[4816]: I0216 15:30:09.925889 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r29rt" event={"ID":"78cb5577-fa54-43cf-a105-1b2bd7bf53b9","Type":"ContainerStarted","Data":"3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415"} Feb 16 15:30:09 crc kubenswrapper[4816]: I0216 15:30:09.953919 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r29rt" podStartSLOduration=3.482128489 podStartE2EDuration="8.953871831s" podCreationTimestamp="2026-02-16 15:30:01 +0000 UTC" firstStartedPulling="2026-02-16 15:30:03.83426805 +0000 UTC m=+8803.160981818" lastFinishedPulling="2026-02-16 15:30:09.306011422 +0000 UTC m=+8808.632725160" observedRunningTime="2026-02-16 15:30:09.943956361 +0000 UTC m=+8809.270670119" watchObservedRunningTime="2026-02-16 15:30:09.953871831 +0000 UTC m=+8809.280585569" Feb 16 15:30:12 crc kubenswrapper[4816]: I0216 15:30:12.058939 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:12 crc kubenswrapper[4816]: 
I0216 15:30:12.059467 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:13 crc kubenswrapper[4816]: I0216 15:30:13.107048 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r29rt" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="registry-server" probeResult="failure" output=< Feb 16 15:30:13 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:30:13 crc kubenswrapper[4816]: > Feb 16 15:30:22 crc kubenswrapper[4816]: I0216 15:30:22.120847 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:22 crc kubenswrapper[4816]: I0216 15:30:22.186158 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:25 crc kubenswrapper[4816]: I0216 15:30:25.494925 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r29rt"] Feb 16 15:30:25 crc kubenswrapper[4816]: I0216 15:30:25.495649 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r29rt" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="registry-server" containerID="cri-o://3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415" gracePeriod=2 Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.051430 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.103802 4816 generic.go:334] "Generic (PLEG): container finished" podID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerID="3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415" exitCode=0 Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.103884 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r29rt" event={"ID":"78cb5577-fa54-43cf-a105-1b2bd7bf53b9","Type":"ContainerDied","Data":"3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415"} Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.103932 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r29rt" event={"ID":"78cb5577-fa54-43cf-a105-1b2bd7bf53b9","Type":"ContainerDied","Data":"e154930a80a49f771d42bf1d8de144739e2560786eb199bd8267af2611a9a7f1"} Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.103976 4816 scope.go:117] "RemoveContainer" containerID="3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.104282 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r29rt" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.140982 4816 scope.go:117] "RemoveContainer" containerID="36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.165348 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtd4r\" (UniqueName: \"kubernetes.io/projected/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-kube-api-access-wtd4r\") pod \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.165575 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-utilities\") pod \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.165877 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-catalog-content\") pod \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\" (UID: \"78cb5577-fa54-43cf-a105-1b2bd7bf53b9\") " Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.169488 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-utilities" (OuterVolumeSpecName: "utilities") pod "78cb5577-fa54-43cf-a105-1b2bd7bf53b9" (UID: "78cb5577-fa54-43cf-a105-1b2bd7bf53b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.171811 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.172465 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-kube-api-access-wtd4r" (OuterVolumeSpecName: "kube-api-access-wtd4r") pod "78cb5577-fa54-43cf-a105-1b2bd7bf53b9" (UID: "78cb5577-fa54-43cf-a105-1b2bd7bf53b9"). InnerVolumeSpecName "kube-api-access-wtd4r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.178106 4816 scope.go:117] "RemoveContainer" containerID="99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.262617 4816 scope.go:117] "RemoveContainer" containerID="3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415" Feb 16 15:30:26 crc kubenswrapper[4816]: E0216 15:30:26.263350 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415\": container with ID starting with 3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415 not found: ID does not exist" containerID="3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.263415 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415"} err="failed to get container status \"3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415\": rpc error: code = NotFound desc = could not find container \"3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415\": container with ID starting with 3761c86e8e471ee2c540381d0891be849ff157bf38c4a46e9393f82f9e627415 not found: ID does not exist" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.263454 4816 scope.go:117] "RemoveContainer" containerID="36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35" Feb 16 15:30:26 crc kubenswrapper[4816]: E0216 15:30:26.263928 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35\": container with ID starting with 36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35 not found: ID does not exist" containerID="36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.264058 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35"} err="failed to get container status \"36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35\": rpc error: code = NotFound desc = could not find container \"36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35\": container with ID starting with 36c923b5dfd7ef949ccff011500df4ac7f9fa921af92fc9f67916ddba4912f35 not found: ID does not exist" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.264133 4816 scope.go:117] "RemoveContainer" containerID="99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43" Feb 16 15:30:26 crc kubenswrapper[4816]: E0216 15:30:26.268442 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43\": container with ID starting with 99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43 not found: ID does not exist" containerID="99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.268498 4816 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43"} err="failed to get container status \"99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43\": rpc error: code = NotFound desc = could not find container \"99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43\": container with ID starting with 99b941484125b932a3323b8289c0eb04db5ea806254b8d6d59b291e159d0bb43 not found: ID does not exist" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.275411 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtd4r\" (UniqueName: \"kubernetes.io/projected/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-kube-api-access-wtd4r\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.288985 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78cb5577-fa54-43cf-a105-1b2bd7bf53b9" (UID: "78cb5577-fa54-43cf-a105-1b2bd7bf53b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.376954 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78cb5577-fa54-43cf-a105-1b2bd7bf53b9-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.462758 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r29rt"] Feb 16 15:30:26 crc kubenswrapper[4816]: I0216 15:30:26.474271 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r29rt"] Feb 16 15:30:27 crc kubenswrapper[4816]: I0216 15:30:27.417507 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" path="/var/lib/kubelet/pods/78cb5577-fa54-43cf-a105-1b2bd7bf53b9/volumes" Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.745304 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.751063 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\") pod \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.751272 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh78j\" (UniqueName: \"kubernetes.io/projected/597a1a8c-07ae-48f5-a305-9db9b431ebf8-kube-api-access-sh78j\") pod \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\" (UID: \"597a1a8c-07ae-48f5-a305-9db9b431ebf8\") " Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.757970 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/597a1a8c-07ae-48f5-a305-9db9b431ebf8-kube-api-access-sh78j" (OuterVolumeSpecName: "kube-api-access-sh78j") pod "597a1a8c-07ae-48f5-a305-9db9b431ebf8" (UID: "597a1a8c-07ae-48f5-a305-9db9b431ebf8"). InnerVolumeSpecName "kube-api-access-sh78j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.773106 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d" (OuterVolumeSpecName: "mariadb-data") pod "597a1a8c-07ae-48f5-a305-9db9b431ebf8" (UID: "597a1a8c-07ae-48f5-a305-9db9b431ebf8"). InnerVolumeSpecName "pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.853026 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh78j\" (UniqueName: \"kubernetes.io/projected/597a1a8c-07ae-48f5-a305-9db9b431ebf8-kube-api-access-sh78j\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.853114 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\") on node \"crc\" " Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.877427 4816 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.877577 4816 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d") on node "crc" Feb 16 15:30:33 crc kubenswrapper[4816]: I0216 15:30:33.955687 4816 reconciler_common.go:293] "Volume detached for volume \"pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca7b6c01-962c-4ff1-a3a3-69eee87a1f9d\") on node \"crc\" DevicePath \"\"" Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.026224 4816 scope.go:117] "RemoveContainer" containerID="9f63bd45b1595ea6623a86e08a4a2096b4004637fea2dd24f46bf1ba93d76494" Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.052387 4816 scope.go:117] "RemoveContainer" containerID="7891f3163869fccfa63dab749bed4b2d62bcb2f32ad4a8e49123e0d3dceb3bcc" Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.188468 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.191016 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"597a1a8c-07ae-48f5-a305-9db9b431ebf8","Type":"ContainerDied","Data":"9f63bd45b1595ea6623a86e08a4a2096b4004637fea2dd24f46bf1ba93d76494"} Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.191061 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"597a1a8c-07ae-48f5-a305-9db9b431ebf8","Type":"ContainerDied","Data":"786b8639a7a1e12ddb483deb22a0baa61928b893ae03e54e6f9cf029edb34078"} Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.245991 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.259097 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.830530 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Feb 16 15:30:34 crc kubenswrapper[4816]: I0216 15:30:34.830795 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="f0778ea5-8a1a-4366-ab14-7ef29ec67351" containerName="adoption" containerID="cri-o://8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8" gracePeriod=30 Feb 16 15:30:35 crc kubenswrapper[4816]: I0216 15:30:35.409586 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="597a1a8c-07ae-48f5-a305-9db9b431ebf8" path="/var/lib/kubelet/pods/597a1a8c-07ae-48f5-a305-9db9b431ebf8/volumes" Feb 16 15:30:36 crc kubenswrapper[4816]: I0216 15:30:36.940695 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:30:36 crc kubenswrapper[4816]: I0216 15:30:36.940769 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.506621 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cn9b7"] Feb 16 15:30:57 crc kubenswrapper[4816]: E0216 15:30:57.507876 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a18063-aa20-421b-8605-e4dc95d035c4" containerName="collect-profiles" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.507892 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a18063-aa20-421b-8605-e4dc95d035c4" containerName="collect-profiles" Feb 16 15:30:57 crc kubenswrapper[4816]: E0216 15:30:57.507920 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="extract-content" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.507929 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="extract-content" Feb 16 15:30:57 crc kubenswrapper[4816]: E0216 15:30:57.507954 4816 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="extract-utilities" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.507966 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="extract-utilities" Feb 16 15:30:57 crc kubenswrapper[4816]: E0216 15:30:57.507983 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="registry-server" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.507991 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="registry-server" Feb 16 15:30:57 crc kubenswrapper[4816]: E0216 15:30:57.508030 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="597a1a8c-07ae-48f5-a305-9db9b431ebf8" containerName="adoption" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.508039 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="597a1a8c-07ae-48f5-a305-9db9b431ebf8" containerName="adoption" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.508380 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a18063-aa20-421b-8605-e4dc95d035c4" containerName="collect-profiles" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.508415 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="78cb5577-fa54-43cf-a105-1b2bd7bf53b9" containerName="registry-server" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.508450 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="597a1a8c-07ae-48f5-a305-9db9b431ebf8" containerName="adoption" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.510949 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.527748 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn9b7"] Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.610525 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-utilities\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.610939 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7pj9\" (UniqueName: \"kubernetes.io/projected/65f4e7ff-3fa1-494b-aa79-f306ffb83383-kube-api-access-w7pj9\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.611070 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-catalog-content\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.712955 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-utilities\") pod \"redhat-marketplace-cn9b7\" (UID: 
\"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.713039 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7pj9\" (UniqueName: \"kubernetes.io/projected/65f4e7ff-3fa1-494b-aa79-f306ffb83383-kube-api-access-w7pj9\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.713112 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-catalog-content\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.713530 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-utilities\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.713721 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-catalog-content\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.731912 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7pj9\" (UniqueName: \"kubernetes.io/projected/65f4e7ff-3fa1-494b-aa79-f306ffb83383-kube-api-access-w7pj9\") pod \"redhat-marketplace-cn9b7\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:57 crc kubenswrapper[4816]: I0216 15:30:57.846743 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:30:58 crc kubenswrapper[4816]: I0216 15:30:58.347083 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn9b7"] Feb 16 15:30:58 crc kubenswrapper[4816]: I0216 15:30:58.488376 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn9b7" event={"ID":"65f4e7ff-3fa1-494b-aa79-f306ffb83383","Type":"ContainerStarted","Data":"2d6d48ef536460d7afc37be2eaefa7a0acf939f1b73105c52ef6281bdc538388"} Feb 16 15:30:59 crc kubenswrapper[4816]: I0216 15:30:59.500525 4816 generic.go:334] "Generic (PLEG): container finished" podID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerID="100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190" exitCode=0 Feb 16 15:30:59 crc kubenswrapper[4816]: I0216 15:30:59.500594 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn9b7" event={"ID":"65f4e7ff-3fa1-494b-aa79-f306ffb83383","Type":"ContainerDied","Data":"100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190"} Feb 16 15:31:01 crc kubenswrapper[4816]: I0216 15:31:01.525807 4816 generic.go:334] "Generic (PLEG): container finished" podID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerID="9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f" exitCode=0 Feb 16 15:31:01 crc kubenswrapper[4816]: I0216 15:31:01.525861 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn9b7" event={"ID":"65f4e7ff-3fa1-494b-aa79-f306ffb83383","Type":"ContainerDied","Data":"9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f"} Feb 16 15:31:02 crc kubenswrapper[4816]: I0216 15:31:02.538546 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn9b7" event={"ID":"65f4e7ff-3fa1-494b-aa79-f306ffb83383","Type":"ContainerStarted","Data":"5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd"} Feb 16 15:31:02 crc kubenswrapper[4816]: I0216 15:31:02.562357 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cn9b7" podStartSLOduration=3.085617928 podStartE2EDuration="5.562335215s" podCreationTimestamp="2026-02-16 15:30:57 +0000 UTC" firstStartedPulling="2026-02-16 15:30:59.503397218 +0000 UTC m=+8858.830110946" lastFinishedPulling="2026-02-16 15:31:01.980114505 +0000 UTC m=+8861.306828233" observedRunningTime="2026-02-16 15:31:02.559550669 +0000 UTC m=+8861.886264417" watchObservedRunningTime="2026-02-16 15:31:02.562335215 +0000 UTC m=+8861.889048943" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.332234 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.398728 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268\") pod \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.398820 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xzln\" (UniqueName: \"kubernetes.io/projected/f0778ea5-8a1a-4366-ab14-7ef29ec67351-kube-api-access-5xzln\") pod \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.398898 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/f0778ea5-8a1a-4366-ab14-7ef29ec67351-ovn-data-cert\") pod \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\" (UID: \"f0778ea5-8a1a-4366-ab14-7ef29ec67351\") " Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.405758 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0778ea5-8a1a-4366-ab14-7ef29ec67351-kube-api-access-5xzln" (OuterVolumeSpecName: "kube-api-access-5xzln") pod "f0778ea5-8a1a-4366-ab14-7ef29ec67351" (UID: "f0778ea5-8a1a-4366-ab14-7ef29ec67351"). InnerVolumeSpecName "kube-api-access-5xzln". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.405846 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0778ea5-8a1a-4366-ab14-7ef29ec67351-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "f0778ea5-8a1a-4366-ab14-7ef29ec67351" (UID: "f0778ea5-8a1a-4366-ab14-7ef29ec67351"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.428061 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268" (OuterVolumeSpecName: "ovn-data") pod "f0778ea5-8a1a-4366-ab14-7ef29ec67351" (UID: "f0778ea5-8a1a-4366-ab14-7ef29ec67351"). InnerVolumeSpecName "pvc-c21d94f4-d86c-4248-a0b6-151f44818268". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.501333 4816 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c21d94f4-d86c-4248-a0b6-151f44818268\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268\") on node \"crc\" " Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.501370 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xzln\" (UniqueName: \"kubernetes.io/projected/f0778ea5-8a1a-4366-ab14-7ef29ec67351-kube-api-access-5xzln\") on node \"crc\" DevicePath \"\"" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.501381 4816 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/f0778ea5-8a1a-4366-ab14-7ef29ec67351-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.532529 4816 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping UnmountDevice... Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.532724 4816 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c21d94f4-d86c-4248-a0b6-151f44818268" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268") on node "crc" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.571851 4816 generic.go:334] "Generic (PLEG): container finished" podID="f0778ea5-8a1a-4366-ab14-7ef29ec67351" containerID="8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8" exitCode=137 Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.571903 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.571897 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"f0778ea5-8a1a-4366-ab14-7ef29ec67351","Type":"ContainerDied","Data":"8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8"} Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.572040 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"f0778ea5-8a1a-4366-ab14-7ef29ec67351","Type":"ContainerDied","Data":"f6cb4c25c3f778a44694437ad141e3ddd4bcffff745c856b4899c929ccec1e41"} Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.572073 4816 scope.go:117] "RemoveContainer" containerID="8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.605071 4816 reconciler_common.go:293] "Volume detached for volume \"pvc-c21d94f4-d86c-4248-a0b6-151f44818268\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c21d94f4-d86c-4248-a0b6-151f44818268\") on node \"crc\" DevicePath \"\"" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.610330 4816 scope.go:117] "RemoveContainer" containerID="8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8" Feb 16 15:31:05 crc kubenswrapper[4816]: E0216 15:31:05.610702 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8\": container with ID starting with 8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8 not found: ID does not exist" containerID="8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.610742 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8"} err="failed to get container status \"8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8\": rpc error: code = NotFound desc = could not find container \"8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8\": container with ID starting with 8638d3560995d3494c332f4cf23cd4b166db2163d1a23b51f044e5a9802804f8 not found: ID does not exist" Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.610914 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Feb 16 15:31:05 crc kubenswrapper[4816]: I0216 15:31:05.620619 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Feb 16 15:31:06 crc kubenswrapper[4816]: I0216 15:31:06.940261 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:31:06 crc kubenswrapper[4816]: I0216 15:31:06.940574 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:31:07 crc kubenswrapper[4816]: I0216 15:31:07.412846 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0778ea5-8a1a-4366-ab14-7ef29ec67351" path="/var/lib/kubelet/pods/f0778ea5-8a1a-4366-ab14-7ef29ec67351/volumes" Feb 16 15:31:07 crc kubenswrapper[4816]: I0216 15:31:07.847376 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:31:07 crc kubenswrapper[4816]: I0216 15:31:07.847428 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:31:07 crc kubenswrapper[4816]: I0216 15:31:07.891396 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:31:08 crc kubenswrapper[4816]: I0216 15:31:08.656440 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:31:08 crc kubenswrapper[4816]: I0216 15:31:08.707542 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn9b7"] Feb 16 15:31:10 crc kubenswrapper[4816]: I0216 15:31:10.619710 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cn9b7" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="registry-server" containerID="cri-o://5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd" gracePeriod=2 Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.595000 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.636967 4816 generic.go:334] "Generic (PLEG): container finished" podID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerID="5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd" exitCode=0 Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.637057 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cn9b7" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.637066 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn9b7" event={"ID":"65f4e7ff-3fa1-494b-aa79-f306ffb83383","Type":"ContainerDied","Data":"5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd"} Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.637254 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cn9b7" event={"ID":"65f4e7ff-3fa1-494b-aa79-f306ffb83383","Type":"ContainerDied","Data":"2d6d48ef536460d7afc37be2eaefa7a0acf939f1b73105c52ef6281bdc538388"} Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.637295 4816 scope.go:117] "RemoveContainer" containerID="5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.656023 4816 scope.go:117] "RemoveContainer" containerID="9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.683163 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7pj9\" (UniqueName: \"kubernetes.io/projected/65f4e7ff-3fa1-494b-aa79-f306ffb83383-kube-api-access-w7pj9\") pod \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.683278 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-utilities\") pod \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.683307 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-catalog-content\") pod \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\" (UID: \"65f4e7ff-3fa1-494b-aa79-f306ffb83383\") " Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.684707 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-utilities" (OuterVolumeSpecName: "utilities") pod "65f4e7ff-3fa1-494b-aa79-f306ffb83383" (UID: "65f4e7ff-3fa1-494b-aa79-f306ffb83383"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.687818 4816 scope.go:117] "RemoveContainer" containerID="100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.690253 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65f4e7ff-3fa1-494b-aa79-f306ffb83383-kube-api-access-w7pj9" (OuterVolumeSpecName: "kube-api-access-w7pj9") pod "65f4e7ff-3fa1-494b-aa79-f306ffb83383" (UID: "65f4e7ff-3fa1-494b-aa79-f306ffb83383"). InnerVolumeSpecName "kube-api-access-w7pj9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.711288 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65f4e7ff-3fa1-494b-aa79-f306ffb83383" (UID: "65f4e7ff-3fa1-494b-aa79-f306ffb83383"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.772243 4816 scope.go:117] "RemoveContainer" containerID="5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd" Feb 16 15:31:11 crc kubenswrapper[4816]: E0216 15:31:11.773053 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd\": container with ID starting with 5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd not found: ID does not exist" containerID="5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.773112 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd"} err="failed to get container status \"5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd\": rpc error: code = NotFound desc = could not find container \"5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd\": container with ID starting with 5490747a865a8ae6ea3707ad8c0cd6d19bcad9d309149427608639ca9cf5f4cd not found: ID does not exist" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.773147 4816 scope.go:117] "RemoveContainer" containerID="9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f" Feb 16 15:31:11 crc kubenswrapper[4816]: E0216 15:31:11.774516 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f\": container with ID starting with 9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f not found: ID does not exist" containerID="9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.774694 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f"} err="failed to get container status \"9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f\": rpc error: code = NotFound desc = could not find container \"9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f\": container with ID starting with 9e868bf6a7895afd8d3fb368f0b658cd6c0e1b2ecbed121a5d32928f022b2f0f not found: ID does not exist" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.774814 4816 scope.go:117] "RemoveContainer" containerID="100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190" Feb 16 15:31:11 crc kubenswrapper[4816]: E0216 15:31:11.776230 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190\": container with ID starting with 100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190 not found: ID does not exist" 
containerID="100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.776281 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190"} err="failed to get container status \"100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190\": rpc error: code = NotFound desc = could not find container \"100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190\": container with ID starting with 100fdb5cd04908603f79aae9b95350467235eb1bb75a380ece30909a93ba4190 not found: ID does not exist" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.786911 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.786963 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65f4e7ff-3fa1-494b-aa79-f306ffb83383-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.786978 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7pj9\" (UniqueName: \"kubernetes.io/projected/65f4e7ff-3fa1-494b-aa79-f306ffb83383-kube-api-access-w7pj9\") on node \"crc\" DevicePath \"\"" Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.979352 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn9b7"] Feb 16 15:31:11 crc kubenswrapper[4816]: I0216 15:31:11.991948 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cn9b7"] Feb 16 15:31:13 crc kubenswrapper[4816]: I0216 15:31:13.411690 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" path="/var/lib/kubelet/pods/65f4e7ff-3fa1-494b-aa79-f306ffb83383/volumes" Feb 16 15:31:36 crc kubenswrapper[4816]: I0216 15:31:36.941336 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:31:36 crc kubenswrapper[4816]: I0216 15:31:36.942457 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:31:36 crc kubenswrapper[4816]: I0216 15:31:36.942553 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 15:31:36 crc kubenswrapper[4816]: I0216 15:31:36.944436 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 15:31:36 crc kubenswrapper[4816]: I0216 15:31:36.944560 4816 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" gracePeriod=600 Feb 16 15:31:37 crc kubenswrapper[4816]: E0216 15:31:37.741128 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:31:37 crc kubenswrapper[4816]: I0216 15:31:37.902519 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" exitCode=0 Feb 16 15:31:37 crc kubenswrapper[4816]: I0216 15:31:37.902579 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64"} Feb 16 15:31:37 crc kubenswrapper[4816]: I0216 15:31:37.902624 4816 scope.go:117] "RemoveContainer" containerID="577ce560bb82351791a6fc74b7dcadb020b11578ab141cbc58f79d5f440d65b3" Feb 16 15:31:37 crc kubenswrapper[4816]: I0216 15:31:37.903393 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:31:37 crc kubenswrapper[4816]: E0216 15:31:37.903726 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:31:49 crc kubenswrapper[4816]: I0216 15:31:49.400276 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:31:49 crc kubenswrapper[4816]: E0216 15:31:49.401949 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.370358 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cmn85/must-gather-2mfgd"] Feb 16 15:32:00 crc kubenswrapper[4816]: E0216 15:32:00.371577 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0778ea5-8a1a-4366-ab14-7ef29ec67351" containerName="adoption" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.371597 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0778ea5-8a1a-4366-ab14-7ef29ec67351" containerName="adoption" Feb 16 15:32:00 crc kubenswrapper[4816]: E0216 15:32:00.371615 4816 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="extract-utilities" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.371623 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="extract-utilities" Feb 16 15:32:00 crc kubenswrapper[4816]: E0216 15:32:00.372877 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="extract-content" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.372914 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="extract-content" Feb 16 15:32:00 crc kubenswrapper[4816]: E0216 15:32:00.372944 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="registry-server" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.372953 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="registry-server" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.373262 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0778ea5-8a1a-4366-ab14-7ef29ec67351" containerName="adoption" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.373302 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f4e7ff-3fa1-494b-aa79-f306ffb83383" containerName="registry-server" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.375466 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.377994 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-cmn85"/"default-dockercfg-dl4w6" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.378301 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cmn85"/"openshift-service-ca.crt" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.378349 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cmn85"/"kube-root-ca.crt" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.382029 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cmn85/must-gather-2mfgd"] Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.440724 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/58505032-b628-4004-ba48-6cb633d4e3ec-must-gather-output\") pod \"must-gather-2mfgd\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.441351 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pgdt\" (UniqueName: \"kubernetes.io/projected/58505032-b628-4004-ba48-6cb633d4e3ec-kube-api-access-8pgdt\") pod \"must-gather-2mfgd\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.542924 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/58505032-b628-4004-ba48-6cb633d4e3ec-must-gather-output\") pod 
\"must-gather-2mfgd\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.543086 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pgdt\" (UniqueName: \"kubernetes.io/projected/58505032-b628-4004-ba48-6cb633d4e3ec-kube-api-access-8pgdt\") pod \"must-gather-2mfgd\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.543396 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/58505032-b628-4004-ba48-6cb633d4e3ec-must-gather-output\") pod \"must-gather-2mfgd\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.563285 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pgdt\" (UniqueName: \"kubernetes.io/projected/58505032-b628-4004-ba48-6cb633d4e3ec-kube-api-access-8pgdt\") pod \"must-gather-2mfgd\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:00 crc kubenswrapper[4816]: I0216 15:32:00.700158 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:32:01 crc kubenswrapper[4816]: I0216 15:32:01.195890 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cmn85/must-gather-2mfgd"] Feb 16 15:32:02 crc kubenswrapper[4816]: I0216 15:32:02.176045 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/must-gather-2mfgd" event={"ID":"58505032-b628-4004-ba48-6cb633d4e3ec","Type":"ContainerStarted","Data":"17bcbae2c369b377fa11a3938eed9102d123a2e8f9fcf2ebe43ee7b1ff1f3b6e"} Feb 16 15:32:03 crc kubenswrapper[4816]: I0216 15:32:03.399399 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:32:03 crc kubenswrapper[4816]: E0216 15:32:03.400509 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:32:09 crc kubenswrapper[4816]: I0216 15:32:09.314538 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/must-gather-2mfgd" event={"ID":"58505032-b628-4004-ba48-6cb633d4e3ec","Type":"ContainerStarted","Data":"f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f"} Feb 16 15:32:09 crc kubenswrapper[4816]: I0216 15:32:09.315103 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/must-gather-2mfgd" event={"ID":"58505032-b628-4004-ba48-6cb633d4e3ec","Type":"ContainerStarted","Data":"b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1"} Feb 16 15:32:09 crc kubenswrapper[4816]: I0216 15:32:09.341208 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cmn85/must-gather-2mfgd" podStartSLOduration=2.486956463 podStartE2EDuration="9.341190018s" 
podCreationTimestamp="2026-02-16 15:32:00 +0000 UTC" firstStartedPulling="2026-02-16 15:32:01.514771104 +0000 UTC m=+8920.841484852" lastFinishedPulling="2026-02-16 15:32:08.369004679 +0000 UTC m=+8927.695718407" observedRunningTime="2026-02-16 15:32:09.330350512 +0000 UTC m=+8928.657064240" watchObservedRunningTime="2026-02-16 15:32:09.341190018 +0000 UTC m=+8928.667903746" Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.653706 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cmn85/crc-debug-kvwhm"] Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.655925 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.762863 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt5nf\" (UniqueName: \"kubernetes.io/projected/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-kube-api-access-kt5nf\") pod \"crc-debug-kvwhm\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.763021 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-host\") pod \"crc-debug-kvwhm\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.864182 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt5nf\" (UniqueName: \"kubernetes.io/projected/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-kube-api-access-kt5nf\") pod \"crc-debug-kvwhm\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.864287 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-host\") pod \"crc-debug-kvwhm\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.864454 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-host\") pod \"crc-debug-kvwhm\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:13 crc kubenswrapper[4816]: I0216 15:32:13.886412 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt5nf\" (UniqueName: \"kubernetes.io/projected/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-kube-api-access-kt5nf\") pod \"crc-debug-kvwhm\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:14 crc kubenswrapper[4816]: I0216 15:32:14.078964 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:14 crc kubenswrapper[4816]: I0216 15:32:14.367621 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" event={"ID":"75d29aab-9bec-4e8b-9982-fd6875ebfaf6","Type":"ContainerStarted","Data":"7fbe4e4d94d91be9e0a9624f63c41c73301e9f28dffbf69190fe21635a005908"} Feb 16 15:32:18 crc kubenswrapper[4816]: I0216 15:32:18.398968 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:32:18 crc kubenswrapper[4816]: E0216 15:32:18.399681 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:32:29 crc kubenswrapper[4816]: I0216 15:32:29.549158 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" event={"ID":"75d29aab-9bec-4e8b-9982-fd6875ebfaf6","Type":"ContainerStarted","Data":"f6f0f16c78281d05b812c450f70c220ddbce219ef0ae845069dea5f6a04817d1"} Feb 16 15:32:29 crc kubenswrapper[4816]: I0216 15:32:29.580620 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" podStartSLOduration=1.836743669 podStartE2EDuration="16.580600018s" podCreationTimestamp="2026-02-16 15:32:13 +0000 UTC" firstStartedPulling="2026-02-16 15:32:14.16320666 +0000 UTC m=+8933.489920388" lastFinishedPulling="2026-02-16 15:32:28.907063009 +0000 UTC m=+8948.233776737" observedRunningTime="2026-02-16 15:32:29.562919466 +0000 UTC m=+8948.889633194" watchObservedRunningTime="2026-02-16 15:32:29.580600018 +0000 UTC m=+8948.907313746" Feb 16 15:32:33 crc kubenswrapper[4816]: I0216 15:32:33.402101 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:32:33 crc kubenswrapper[4816]: E0216 15:32:33.403327 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:32:46 crc kubenswrapper[4816]: I0216 15:32:46.398300 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:32:46 crc kubenswrapper[4816]: E0216 15:32:46.399070 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:32:53 crc kubenswrapper[4816]: I0216 15:32:53.895568 4816 generic.go:334] "Generic (PLEG): container finished" podID="75d29aab-9bec-4e8b-9982-fd6875ebfaf6" 
containerID="f6f0f16c78281d05b812c450f70c220ddbce219ef0ae845069dea5f6a04817d1" exitCode=0 Feb 16 15:32:53 crc kubenswrapper[4816]: I0216 15:32:53.895646 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" event={"ID":"75d29aab-9bec-4e8b-9982-fd6875ebfaf6","Type":"ContainerDied","Data":"f6f0f16c78281d05b812c450f70c220ddbce219ef0ae845069dea5f6a04817d1"} Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.052247 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.087901 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cmn85/crc-debug-kvwhm"] Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.099836 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cmn85/crc-debug-kvwhm"] Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.145603 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-host\") pod \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.145712 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt5nf\" (UniqueName: \"kubernetes.io/projected/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-kube-api-access-kt5nf\") pod \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\" (UID: \"75d29aab-9bec-4e8b-9982-fd6875ebfaf6\") " Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.145743 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-host" (OuterVolumeSpecName: "host") pod "75d29aab-9bec-4e8b-9982-fd6875ebfaf6" (UID: "75d29aab-9bec-4e8b-9982-fd6875ebfaf6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.146113 4816 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-host\") on node \"crc\" DevicePath \"\"" Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.160879 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-kube-api-access-kt5nf" (OuterVolumeSpecName: "kube-api-access-kt5nf") pod "75d29aab-9bec-4e8b-9982-fd6875ebfaf6" (UID: "75d29aab-9bec-4e8b-9982-fd6875ebfaf6"). InnerVolumeSpecName "kube-api-access-kt5nf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.247713 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt5nf\" (UniqueName: \"kubernetes.io/projected/75d29aab-9bec-4e8b-9982-fd6875ebfaf6-kube-api-access-kt5nf\") on node \"crc\" DevicePath \"\"" Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.412385 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75d29aab-9bec-4e8b-9982-fd6875ebfaf6" path="/var/lib/kubelet/pods/75d29aab-9bec-4e8b-9982-fd6875ebfaf6/volumes" Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.915739 4816 scope.go:117] "RemoveContainer" containerID="f6f0f16c78281d05b812c450f70c220ddbce219ef0ae845069dea5f6a04817d1" Feb 16 15:32:55 crc kubenswrapper[4816]: I0216 15:32:55.915762 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kvwhm" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.310735 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cmn85/crc-debug-txlc2"] Feb 16 15:32:56 crc kubenswrapper[4816]: E0216 15:32:56.311360 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d29aab-9bec-4e8b-9982-fd6875ebfaf6" containerName="container-00" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.311387 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d29aab-9bec-4e8b-9982-fd6875ebfaf6" containerName="container-00" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.311737 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="75d29aab-9bec-4e8b-9982-fd6875ebfaf6" containerName="container-00" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.312692 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.370674 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f62f331b-80bb-418c-a166-07f4060e8701-host\") pod \"crc-debug-txlc2\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.371002 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsjs6\" (UniqueName: \"kubernetes.io/projected/f62f331b-80bb-418c-a166-07f4060e8701-kube-api-access-gsjs6\") pod \"crc-debug-txlc2\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.473540 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f62f331b-80bb-418c-a166-07f4060e8701-host\") pod \"crc-debug-txlc2\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.473600 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsjs6\" (UniqueName: \"kubernetes.io/projected/f62f331b-80bb-418c-a166-07f4060e8701-kube-api-access-gsjs6\") pod \"crc-debug-txlc2\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.478247 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f62f331b-80bb-418c-a166-07f4060e8701-host\") pod \"crc-debug-txlc2\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.711333 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsjs6\" (UniqueName: \"kubernetes.io/projected/f62f331b-80bb-418c-a166-07f4060e8701-kube-api-access-gsjs6\") pod \"crc-debug-txlc2\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:56 crc kubenswrapper[4816]: I0216 15:32:56.930238 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:57 crc kubenswrapper[4816]: I0216 15:32:57.399038 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:32:57 crc kubenswrapper[4816]: E0216 15:32:57.399634 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:32:57 crc kubenswrapper[4816]: I0216 15:32:57.936772 4816 generic.go:334] "Generic (PLEG): container finished" podID="f62f331b-80bb-418c-a166-07f4060e8701" containerID="e7854c608cf632b948601eb3e5500e2036d20075c86c51d9586103b2357668fb" exitCode=0 Feb 16 15:32:57 crc kubenswrapper[4816]: I0216 15:32:57.936828 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/crc-debug-txlc2" event={"ID":"f62f331b-80bb-418c-a166-07f4060e8701","Type":"ContainerDied","Data":"e7854c608cf632b948601eb3e5500e2036d20075c86c51d9586103b2357668fb"} Feb 16 15:32:57 crc kubenswrapper[4816]: I0216 15:32:57.936919 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/crc-debug-txlc2" event={"ID":"f62f331b-80bb-418c-a166-07f4060e8701","Type":"ContainerStarted","Data":"e44cf2f19bb6f52fff24d6b43476cc7ac57bae930fc077a91936e11baae8754a"} Feb 16 15:32:58 crc kubenswrapper[4816]: I0216 15:32:58.097263 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cmn85/crc-debug-txlc2"] Feb 16 15:32:58 crc kubenswrapper[4816]: I0216 15:32:58.106309 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cmn85/crc-debug-txlc2"] Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.069997 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.240321 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f62f331b-80bb-418c-a166-07f4060e8701-host\") pod \"f62f331b-80bb-418c-a166-07f4060e8701\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.240468 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f62f331b-80bb-418c-a166-07f4060e8701-host" (OuterVolumeSpecName: "host") pod "f62f331b-80bb-418c-a166-07f4060e8701" (UID: "f62f331b-80bb-418c-a166-07f4060e8701"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.240563 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsjs6\" (UniqueName: \"kubernetes.io/projected/f62f331b-80bb-418c-a166-07f4060e8701-kube-api-access-gsjs6\") pod \"f62f331b-80bb-418c-a166-07f4060e8701\" (UID: \"f62f331b-80bb-418c-a166-07f4060e8701\") " Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.241170 4816 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f62f331b-80bb-418c-a166-07f4060e8701-host\") on node \"crc\" DevicePath \"\"" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.245405 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f62f331b-80bb-418c-a166-07f4060e8701-kube-api-access-gsjs6" (OuterVolumeSpecName: "kube-api-access-gsjs6") pod "f62f331b-80bb-418c-a166-07f4060e8701" (UID: "f62f331b-80bb-418c-a166-07f4060e8701"). InnerVolumeSpecName "kube-api-access-gsjs6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.343433 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsjs6\" (UniqueName: \"kubernetes.io/projected/f62f331b-80bb-418c-a166-07f4060e8701-kube-api-access-gsjs6\") on node \"crc\" DevicePath \"\"" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.411730 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f62f331b-80bb-418c-a166-07f4060e8701" path="/var/lib/kubelet/pods/f62f331b-80bb-418c-a166-07f4060e8701/volumes" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.910947 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cmn85/crc-debug-kntqg"] Feb 16 15:32:59 crc kubenswrapper[4816]: E0216 15:32:59.911379 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62f331b-80bb-418c-a166-07f4060e8701" containerName="container-00" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.911398 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62f331b-80bb-418c-a166-07f4060e8701" containerName="container-00" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.911612 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62f331b-80bb-418c-a166-07f4060e8701" containerName="container-00" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.912473 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.956015 4816 scope.go:117] "RemoveContainer" containerID="e7854c608cf632b948601eb3e5500e2036d20075c86c51d9586103b2357668fb" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.956052 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d405bf74-3173-4eff-95bc-93e5ca156a91-host\") pod \"crc-debug-kntqg\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.956053 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-txlc2" Feb 16 15:32:59 crc kubenswrapper[4816]: I0216 15:32:59.956315 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9msp6\" (UniqueName: \"kubernetes.io/projected/d405bf74-3173-4eff-95bc-93e5ca156a91-kube-api-access-9msp6\") pod \"crc-debug-kntqg\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.058422 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d405bf74-3173-4eff-95bc-93e5ca156a91-host\") pod \"crc-debug-kntqg\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.058558 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9msp6\" (UniqueName: \"kubernetes.io/projected/d405bf74-3173-4eff-95bc-93e5ca156a91-kube-api-access-9msp6\") pod \"crc-debug-kntqg\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.058632 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d405bf74-3173-4eff-95bc-93e5ca156a91-host\") pod \"crc-debug-kntqg\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.088639 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9msp6\" (UniqueName: \"kubernetes.io/projected/d405bf74-3173-4eff-95bc-93e5ca156a91-kube-api-access-9msp6\") pod \"crc-debug-kntqg\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.230869 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:00 crc kubenswrapper[4816]: W0216 15:33:00.262825 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd405bf74_3173_4eff_95bc_93e5ca156a91.slice/crio-644b606b513b024f570b281da36f65f9d3e69701ead6e5e4be0fd59e27b01d55 WatchSource:0}: Error finding container 644b606b513b024f570b281da36f65f9d3e69701ead6e5e4be0fd59e27b01d55: Status 404 returned error can't find the container with id 644b606b513b024f570b281da36f65f9d3e69701ead6e5e4be0fd59e27b01d55 Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.965113 4816 generic.go:334] "Generic (PLEG): container finished" podID="d405bf74-3173-4eff-95bc-93e5ca156a91" containerID="ad92fb6df7d6ba8e2a72f205ac37b6d0478eddffd7b825763f827e704f167e8e" exitCode=0 Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.965212 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/crc-debug-kntqg" event={"ID":"d405bf74-3173-4eff-95bc-93e5ca156a91","Type":"ContainerDied","Data":"ad92fb6df7d6ba8e2a72f205ac37b6d0478eddffd7b825763f827e704f167e8e"} Feb 16 15:33:00 crc kubenswrapper[4816]: I0216 15:33:00.965439 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/crc-debug-kntqg" event={"ID":"d405bf74-3173-4eff-95bc-93e5ca156a91","Type":"ContainerStarted","Data":"644b606b513b024f570b281da36f65f9d3e69701ead6e5e4be0fd59e27b01d55"} Feb 16 15:33:01 crc kubenswrapper[4816]: I0216 15:33:01.055834 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cmn85/crc-debug-kntqg"] Feb 16 15:33:01 crc kubenswrapper[4816]: I0216 15:33:01.068763 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cmn85/crc-debug-kntqg"] Feb 16 15:33:02 crc kubenswrapper[4816]: I0216 15:33:02.199971 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:02 crc kubenswrapper[4816]: I0216 15:33:02.398772 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d405bf74-3173-4eff-95bc-93e5ca156a91-host\") pod \"d405bf74-3173-4eff-95bc-93e5ca156a91\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " Feb 16 15:33:02 crc kubenswrapper[4816]: I0216 15:33:02.398905 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9msp6\" (UniqueName: \"kubernetes.io/projected/d405bf74-3173-4eff-95bc-93e5ca156a91-kube-api-access-9msp6\") pod \"d405bf74-3173-4eff-95bc-93e5ca156a91\" (UID: \"d405bf74-3173-4eff-95bc-93e5ca156a91\") " Feb 16 15:33:02 crc kubenswrapper[4816]: I0216 15:33:02.399075 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d405bf74-3173-4eff-95bc-93e5ca156a91-host" (OuterVolumeSpecName: "host") pod "d405bf74-3173-4eff-95bc-93e5ca156a91" (UID: "d405bf74-3173-4eff-95bc-93e5ca156a91"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 16 15:33:02 crc kubenswrapper[4816]: I0216 15:33:02.399626 4816 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d405bf74-3173-4eff-95bc-93e5ca156a91-host\") on node \"crc\" DevicePath \"\"" Feb 16 15:33:02 crc kubenswrapper[4816]: I0216 15:33:02.408112 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d405bf74-3173-4eff-95bc-93e5ca156a91-kube-api-access-9msp6" (OuterVolumeSpecName: "kube-api-access-9msp6") pod "d405bf74-3173-4eff-95bc-93e5ca156a91" (UID: "d405bf74-3173-4eff-95bc-93e5ca156a91"). InnerVolumeSpecName "kube-api-access-9msp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:33:02 crc kubenswrapper[4816]: I0216 15:33:02.503426 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9msp6\" (UniqueName: \"kubernetes.io/projected/d405bf74-3173-4eff-95bc-93e5ca156a91-kube-api-access-9msp6\") on node \"crc\" DevicePath \"\"" Feb 16 15:33:03 crc kubenswrapper[4816]: I0216 15:33:03.109273 4816 scope.go:117] "RemoveContainer" containerID="ad92fb6df7d6ba8e2a72f205ac37b6d0478eddffd7b825763f827e704f167e8e" Feb 16 15:33:03 crc kubenswrapper[4816]: I0216 15:33:03.109313 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/crc-debug-kntqg" Feb 16 15:33:03 crc kubenswrapper[4816]: I0216 15:33:03.412333 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d405bf74-3173-4eff-95bc-93e5ca156a91" path="/var/lib/kubelet/pods/d405bf74-3173-4eff-95bc-93e5ca156a91/volumes" Feb 16 15:33:08 crc kubenswrapper[4816]: I0216 15:33:08.398998 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:33:08 crc kubenswrapper[4816]: E0216 15:33:08.399706 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:33:22 crc kubenswrapper[4816]: I0216 15:33:22.399081 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:33:22 crc kubenswrapper[4816]: E0216 15:33:22.400861 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:33:33 crc kubenswrapper[4816]: I0216 15:33:33.399385 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:33:33 crc kubenswrapper[4816]: E0216 15:33:33.400306 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:33:45 crc kubenswrapper[4816]: I0216 15:33:45.398541 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:33:45 crc kubenswrapper[4816]: E0216 15:33:45.399330 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:33:57 crc kubenswrapper[4816]: I0216 15:33:57.399815 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:33:57 crc kubenswrapper[4816]: E0216 15:33:57.400615 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:34:11 crc kubenswrapper[4816]: I0216 15:34:11.406314 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:34:11 crc kubenswrapper[4816]: E0216 15:34:11.407229 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:34:22 crc kubenswrapper[4816]: I0216 15:34:22.399235 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:34:22 crc kubenswrapper[4816]: E0216 15:34:22.401394 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:34:35 crc kubenswrapper[4816]: I0216 15:34:35.399851 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:34:35 crc kubenswrapper[4816]: E0216 15:34:35.400718 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" 
podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:34:49 crc kubenswrapper[4816]: I0216 15:34:49.399382 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:34:49 crc kubenswrapper[4816]: E0216 15:34:49.400275 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:35:00 crc kubenswrapper[4816]: I0216 15:35:00.413897 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:35:00 crc kubenswrapper[4816]: E0216 15:35:00.414705 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:35:14 crc kubenswrapper[4816]: I0216 15:35:14.399836 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:35:14 crc kubenswrapper[4816]: E0216 15:35:14.400977 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:35:26 crc kubenswrapper[4816]: I0216 15:35:26.399967 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:35:26 crc kubenswrapper[4816]: E0216 15:35:26.400960 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:35:38 crc kubenswrapper[4816]: I0216 15:35:38.406459 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:35:38 crc kubenswrapper[4816]: E0216 15:35:38.408451 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:35:51 crc kubenswrapper[4816]: I0216 15:35:51.411343 4816 scope.go:117] "RemoveContainer" 
containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:35:51 crc kubenswrapper[4816]: E0216 15:35:51.412301 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:36:06 crc kubenswrapper[4816]: I0216 15:36:06.400607 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:36:06 crc kubenswrapper[4816]: E0216 15:36:06.401713 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:36:21 crc kubenswrapper[4816]: I0216 15:36:21.410059 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:36:21 crc kubenswrapper[4816]: E0216 15:36:21.412373 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:36:35 crc kubenswrapper[4816]: I0216 15:36:35.398696 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:36:35 crc kubenswrapper[4816]: E0216 15:36:35.401463 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:36:49 crc kubenswrapper[4816]: I0216 15:36:49.398983 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64" Feb 16 15:36:49 crc kubenswrapper[4816]: I0216 15:36:49.663238 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"d0e2ebe7423c9baf282febac1b199f56648fc3853dd1643a2793afe6c7da1d19"} Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.148819 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-768n5"] Feb 16 15:37:54 crc kubenswrapper[4816]: E0216 15:37:54.152151 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d405bf74-3173-4eff-95bc-93e5ca156a91" containerName="container-00" Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.152529 4816 
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.152997 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="d405bf74-3173-4eff-95bc-93e5ca156a91" containerName="container-00"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.155762 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.176697 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-768n5"]
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.274753 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkdm7\" (UniqueName: \"kubernetes.io/projected/52961e17-f395-49a1-9e04-57e2dec35a9c-kube-api-access-fkdm7\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.274824 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-catalog-content\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.275022 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-utilities\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.376840 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-utilities\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.376934 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkdm7\" (UniqueName: \"kubernetes.io/projected/52961e17-f395-49a1-9e04-57e2dec35a9c-kube-api-access-fkdm7\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.376974 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-catalog-content\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.377582 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-catalog-content\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.378238 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-utilities\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.407833 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkdm7\" (UniqueName: \"kubernetes.io/projected/52961e17-f395-49a1-9e04-57e2dec35a9c-kube-api-access-fkdm7\") pod \"community-operators-768n5\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") " pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:54 crc kubenswrapper[4816]: I0216 15:37:54.524310 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:37:55 crc kubenswrapper[4816]: I0216 15:37:55.104092 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-768n5"]
Feb 16 15:37:55 crc kubenswrapper[4816]: I0216 15:37:55.544406 4816 generic.go:334] "Generic (PLEG): container finished" podID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerID="8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502" exitCode=0
Feb 16 15:37:55 crc kubenswrapper[4816]: I0216 15:37:55.544674 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-768n5" event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerDied","Data":"8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502"}
Feb 16 15:37:55 crc kubenswrapper[4816]: I0216 15:37:55.544701 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-768n5" event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerStarted","Data":"aaab27bd887bd097a4bd7d8469986258348285bdd66a65270a523b9871a6f9ee"}
Feb 16 15:37:55 crc kubenswrapper[4816]: I0216 15:37:55.547148 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.735003 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6dxc2"]
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.740532 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.767563 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6dxc2"]
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.840278 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-catalog-content\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.840330 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vn9w2\" (UniqueName: \"kubernetes.io/projected/11e71426-4407-4194-8787-b80d07aa81a5-kube-api-access-vn9w2\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.840349 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-utilities\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.942717 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-catalog-content\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.942799 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vn9w2\" (UniqueName: \"kubernetes.io/projected/11e71426-4407-4194-8787-b80d07aa81a5-kube-api-access-vn9w2\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.942825 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-utilities\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.943647 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-utilities\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.943725 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-catalog-content\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:56 crc kubenswrapper[4816]: I0216 15:37:56.967758 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vn9w2\" (UniqueName: \"kubernetes.io/projected/11e71426-4407-4194-8787-b80d07aa81a5-kube-api-access-vn9w2\") pod \"certified-operators-6dxc2\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:57 crc kubenswrapper[4816]: I0216 15:37:57.183588 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6dxc2"
Feb 16 15:37:57 crc kubenswrapper[4816]: I0216 15:37:57.569915 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-768n5" event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerStarted","Data":"93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535"}
Feb 16 15:37:57 crc kubenswrapper[4816]: I0216 15:37:57.676600 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6dxc2"]
Feb 16 15:37:57 crc kubenswrapper[4816]: W0216 15:37:57.679258 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11e71426_4407_4194_8787_b80d07aa81a5.slice/crio-352b870cc2f4cfd6418df49b53cea7f3eb6dcb794de4b55c622f0cde5db080e8 WatchSource:0}: Error finding container 352b870cc2f4cfd6418df49b53cea7f3eb6dcb794de4b55c622f0cde5db080e8: Status 404 returned error can't find the container with id 352b870cc2f4cfd6418df49b53cea7f3eb6dcb794de4b55c622f0cde5db080e8
Feb 16 15:37:58 crc kubenswrapper[4816]: I0216 15:37:58.581407 4816 generic.go:334] "Generic (PLEG): container finished" podID="11e71426-4407-4194-8787-b80d07aa81a5" containerID="3b11085d3c4403bd77bb7141ecb57548b45670e5a5d68be74348eaf3882fea04" exitCode=0
Feb 16 15:37:58 crc kubenswrapper[4816]: I0216 15:37:58.581470 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6dxc2" event={"ID":"11e71426-4407-4194-8787-b80d07aa81a5","Type":"ContainerDied","Data":"3b11085d3c4403bd77bb7141ecb57548b45670e5a5d68be74348eaf3882fea04"}
Feb 16 15:37:58 crc kubenswrapper[4816]: I0216 15:37:58.582020 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6dxc2" event={"ID":"11e71426-4407-4194-8787-b80d07aa81a5","Type":"ContainerStarted","Data":"352b870cc2f4cfd6418df49b53cea7f3eb6dcb794de4b55c622f0cde5db080e8"}
Feb 16 15:37:59 crc kubenswrapper[4816]: I0216 15:37:59.597553 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6dxc2" event={"ID":"11e71426-4407-4194-8787-b80d07aa81a5","Type":"ContainerStarted","Data":"d1238b31071e4677ff3625d90bcee63ddf90ad7df244905dbf4ca6836a751e33"}
Feb 16 15:37:59 crc kubenswrapper[4816]: I0216 15:37:59.602474 4816 generic.go:334] "Generic (PLEG): container finished" podID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerID="93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535" exitCode=0
Feb 16 15:37:59 crc kubenswrapper[4816]: I0216 15:37:59.602517 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-768n5" event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerDied","Data":"93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535"}
Feb 16 15:38:00 crc kubenswrapper[4816]: I0216 15:38:00.613670 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-768n5" event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerStarted","Data":"49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f"}
event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerStarted","Data":"49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f"} Feb 16 15:38:00 crc kubenswrapper[4816]: I0216 15:38:00.649132 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-768n5" podStartSLOduration=2.092815241 podStartE2EDuration="6.64906886s" podCreationTimestamp="2026-02-16 15:37:54 +0000 UTC" firstStartedPulling="2026-02-16 15:37:55.54686872 +0000 UTC m=+9274.873582448" lastFinishedPulling="2026-02-16 15:38:00.103122339 +0000 UTC m=+9279.429836067" observedRunningTime="2026-02-16 15:38:00.634223035 +0000 UTC m=+9279.960936783" watchObservedRunningTime="2026-02-16 15:38:00.64906886 +0000 UTC m=+9279.975782608" Feb 16 15:38:01 crc kubenswrapper[4816]: I0216 15:38:01.629780 4816 generic.go:334] "Generic (PLEG): container finished" podID="11e71426-4407-4194-8787-b80d07aa81a5" containerID="d1238b31071e4677ff3625d90bcee63ddf90ad7df244905dbf4ca6836a751e33" exitCode=0 Feb 16 15:38:01 crc kubenswrapper[4816]: I0216 15:38:01.629845 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6dxc2" event={"ID":"11e71426-4407-4194-8787-b80d07aa81a5","Type":"ContainerDied","Data":"d1238b31071e4677ff3625d90bcee63ddf90ad7df244905dbf4ca6836a751e33"} Feb 16 15:38:02 crc kubenswrapper[4816]: I0216 15:38:02.641205 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6dxc2" event={"ID":"11e71426-4407-4194-8787-b80d07aa81a5","Type":"ContainerStarted","Data":"b5aefb1621aaafc60e9d23374b3dd4ad862208341c183abacbb91cf74cc76158"} Feb 16 15:38:02 crc kubenswrapper[4816]: I0216 15:38:02.668218 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6dxc2" podStartSLOduration=3.093654694 podStartE2EDuration="6.668199075s" podCreationTimestamp="2026-02-16 15:37:56 +0000 UTC" firstStartedPulling="2026-02-16 15:37:58.583297903 +0000 UTC m=+9277.910011631" lastFinishedPulling="2026-02-16 15:38:02.157842284 +0000 UTC m=+9281.484556012" observedRunningTime="2026-02-16 15:38:02.657928165 +0000 UTC m=+9281.984641903" watchObservedRunningTime="2026-02-16 15:38:02.668199075 +0000 UTC m=+9281.994912803" Feb 16 15:38:04 crc kubenswrapper[4816]: I0216 15:38:04.525388 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-768n5" Feb 16 15:38:04 crc kubenswrapper[4816]: I0216 15:38:04.525742 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-768n5" Feb 16 15:38:05 crc kubenswrapper[4816]: I0216 15:38:05.584493 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-768n5" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="registry-server" probeResult="failure" output=< Feb 16 15:38:05 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:38:05 crc kubenswrapper[4816]: > Feb 16 15:38:07 crc kubenswrapper[4816]: I0216 15:38:07.184224 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6dxc2" Feb 16 15:38:07 crc kubenswrapper[4816]: I0216 15:38:07.184489 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6dxc2" Feb 16 15:38:07 crc kubenswrapper[4816]: I0216 
15:38:07.260305 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6dxc2" Feb 16 15:38:08 crc kubenswrapper[4816]: I0216 15:38:08.267924 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6dxc2" Feb 16 15:38:08 crc kubenswrapper[4816]: I0216 15:38:08.342241 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6dxc2"] Feb 16 15:38:09 crc kubenswrapper[4816]: I0216 15:38:09.723041 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6dxc2" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="registry-server" containerID="cri-o://b5aefb1621aaafc60e9d23374b3dd4ad862208341c183abacbb91cf74cc76158" gracePeriod=2 Feb 16 15:38:10 crc kubenswrapper[4816]: I0216 15:38:10.738321 4816 generic.go:334] "Generic (PLEG): container finished" podID="11e71426-4407-4194-8787-b80d07aa81a5" containerID="b5aefb1621aaafc60e9d23374b3dd4ad862208341c183abacbb91cf74cc76158" exitCode=0 Feb 16 15:38:10 crc kubenswrapper[4816]: I0216 15:38:10.738669 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6dxc2" event={"ID":"11e71426-4407-4194-8787-b80d07aa81a5","Type":"ContainerDied","Data":"b5aefb1621aaafc60e9d23374b3dd4ad862208341c183abacbb91cf74cc76158"} Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.546923 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6dxc2" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.581888 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vn9w2\" (UniqueName: \"kubernetes.io/projected/11e71426-4407-4194-8787-b80d07aa81a5-kube-api-access-vn9w2\") pod \"11e71426-4407-4194-8787-b80d07aa81a5\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.582425 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-catalog-content\") pod \"11e71426-4407-4194-8787-b80d07aa81a5\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.582543 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-utilities\") pod \"11e71426-4407-4194-8787-b80d07aa81a5\" (UID: \"11e71426-4407-4194-8787-b80d07aa81a5\") " Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.583814 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-utilities" (OuterVolumeSpecName: "utilities") pod "11e71426-4407-4194-8787-b80d07aa81a5" (UID: "11e71426-4407-4194-8787-b80d07aa81a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.593001 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11e71426-4407-4194-8787-b80d07aa81a5-kube-api-access-vn9w2" (OuterVolumeSpecName: "kube-api-access-vn9w2") pod "11e71426-4407-4194-8787-b80d07aa81a5" (UID: "11e71426-4407-4194-8787-b80d07aa81a5"). 
InnerVolumeSpecName "kube-api-access-vn9w2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.644612 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11e71426-4407-4194-8787-b80d07aa81a5" (UID: "11e71426-4407-4194-8787-b80d07aa81a5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.685811 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vn9w2\" (UniqueName: \"kubernetes.io/projected/11e71426-4407-4194-8787-b80d07aa81a5-kube-api-access-vn9w2\") on node \"crc\" DevicePath \"\"" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.685874 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.685891 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11e71426-4407-4194-8787-b80d07aa81a5-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.751757 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6dxc2" event={"ID":"11e71426-4407-4194-8787-b80d07aa81a5","Type":"ContainerDied","Data":"352b870cc2f4cfd6418df49b53cea7f3eb6dcb794de4b55c622f0cde5db080e8"} Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.751828 4816 scope.go:117] "RemoveContainer" containerID="b5aefb1621aaafc60e9d23374b3dd4ad862208341c183abacbb91cf74cc76158" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.752025 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6dxc2" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.785415 4816 scope.go:117] "RemoveContainer" containerID="d1238b31071e4677ff3625d90bcee63ddf90ad7df244905dbf4ca6836a751e33" Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.800547 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6dxc2"] Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.816231 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6dxc2"] Feb 16 15:38:11 crc kubenswrapper[4816]: I0216 15:38:11.858712 4816 scope.go:117] "RemoveContainer" containerID="3b11085d3c4403bd77bb7141ecb57548b45670e5a5d68be74348eaf3882fea04" Feb 16 15:38:13 crc kubenswrapper[4816]: I0216 15:38:13.416071 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11e71426-4407-4194-8787-b80d07aa81a5" path="/var/lib/kubelet/pods/11e71426-4407-4194-8787-b80d07aa81a5/volumes" Feb 16 15:38:14 crc kubenswrapper[4816]: I0216 15:38:14.599756 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-768n5" Feb 16 15:38:14 crc kubenswrapper[4816]: I0216 15:38:14.654474 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-768n5" Feb 16 15:38:14 crc kubenswrapper[4816]: I0216 15:38:14.837269 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-768n5"] Feb 16 15:38:15 crc kubenswrapper[4816]: I0216 15:38:15.785854 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-768n5" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="registry-server" containerID="cri-o://49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f" gracePeriod=2 Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.295703 4816 util.go:48] "No ready sandbox for pod can be found. 
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.389755 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkdm7\" (UniqueName: \"kubernetes.io/projected/52961e17-f395-49a1-9e04-57e2dec35a9c-kube-api-access-fkdm7\") pod \"52961e17-f395-49a1-9e04-57e2dec35a9c\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") "
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.389857 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-catalog-content\") pod \"52961e17-f395-49a1-9e04-57e2dec35a9c\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") "
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.389929 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-utilities\") pod \"52961e17-f395-49a1-9e04-57e2dec35a9c\" (UID: \"52961e17-f395-49a1-9e04-57e2dec35a9c\") "
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.390846 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-utilities" (OuterVolumeSpecName: "utilities") pod "52961e17-f395-49a1-9e04-57e2dec35a9c" (UID: "52961e17-f395-49a1-9e04-57e2dec35a9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.405680 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52961e17-f395-49a1-9e04-57e2dec35a9c-kube-api-access-fkdm7" (OuterVolumeSpecName: "kube-api-access-fkdm7") pod "52961e17-f395-49a1-9e04-57e2dec35a9c" (UID: "52961e17-f395-49a1-9e04-57e2dec35a9c"). InnerVolumeSpecName "kube-api-access-fkdm7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.494199 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkdm7\" (UniqueName: \"kubernetes.io/projected/52961e17-f395-49a1-9e04-57e2dec35a9c-kube-api-access-fkdm7\") on node \"crc\" DevicePath \"\""
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.494241 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-utilities\") on node \"crc\" DevicePath \"\""
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.499720 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52961e17-f395-49a1-9e04-57e2dec35a9c" (UID: "52961e17-f395-49a1-9e04-57e2dec35a9c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.596683 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52961e17-f395-49a1-9e04-57e2dec35a9c-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.802526 4816 generic.go:334] "Generic (PLEG): container finished" podID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerID="49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f" exitCode=0
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.803700 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-768n5" event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerDied","Data":"49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f"}
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.803884 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-768n5" event={"ID":"52961e17-f395-49a1-9e04-57e2dec35a9c","Type":"ContainerDied","Data":"aaab27bd887bd097a4bd7d8469986258348285bdd66a65270a523b9871a6f9ee"}
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.803983 4816 scope.go:117] "RemoveContainer" containerID="49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.804245 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-768n5"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.876474 4816 scope.go:117] "RemoveContainer" containerID="93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.882059 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-768n5"]
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.901043 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-768n5"]
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.922920 4816 scope.go:117] "RemoveContainer" containerID="8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.944354 4816 scope.go:117] "RemoveContainer" containerID="49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f"
Feb 16 15:38:16 crc kubenswrapper[4816]: E0216 15:38:16.946146 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f\": container with ID starting with 49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f not found: ID does not exist" containerID="49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.946198 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f"} err="failed to get container status \"49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f\": rpc error: code = NotFound desc = could not find container \"49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f\": container with ID starting with 49aaffb4bae7ef4b10b36f98a570f300861937af3ec2eb5445d92db22e0faa9f not found: ID does not exist"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.946224 4816 scope.go:117] "RemoveContainer" containerID="93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535"
Feb 16 15:38:16 crc kubenswrapper[4816]: E0216 15:38:16.946520 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535\": container with ID starting with 93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535 not found: ID does not exist" containerID="93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.946552 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535"} err="failed to get container status \"93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535\": rpc error: code = NotFound desc = could not find container \"93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535\": container with ID starting with 93f6cb32b05dc7ef147b79d885a57ac9329e3e3f0d385e7354d4f3987b0a3535 not found: ID does not exist"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.946574 4816 scope.go:117] "RemoveContainer" containerID="8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502"
Feb 16 15:38:16 crc kubenswrapper[4816]: E0216 15:38:16.946881 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502\": container with ID starting with 8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502 not found: ID does not exist" containerID="8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502"
Feb 16 15:38:16 crc kubenswrapper[4816]: I0216 15:38:16.946939 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502"} err="failed to get container status \"8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502\": rpc error: code = NotFound desc = could not find container \"8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502\": container with ID starting with 8f3863e6290c5df95a13b8c85e6df52a617f16a0e5236b44a8ba8f2b25071502 not found: ID does not exist"
Feb 16 15:38:17 crc kubenswrapper[4816]: I0216 15:38:17.420201 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" path="/var/lib/kubelet/pods/52961e17-f395-49a1-9e04-57e2dec35a9c/volumes"
Feb 16 15:39:06 crc kubenswrapper[4816]: I0216 15:39:06.940241 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 15:39:06 crc kubenswrapper[4816]: I0216 15:39:06.940714 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 15:39:36 crc kubenswrapper[4816]: I0216 15:39:36.941417 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 15:39:36 crc kubenswrapper[4816]: I0216 15:39:36.942148 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 15:40:06 crc kubenswrapper[4816]: I0216 15:40:06.940939 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 16 15:40:06 crc kubenswrapper[4816]: I0216 15:40:06.941563 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 16 15:40:06 crc kubenswrapper[4816]: I0216 15:40:06.941640 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc"
Feb 16 15:40:06 crc kubenswrapper[4816]: I0216 15:40:06.942732 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d0e2ebe7423c9baf282febac1b199f56648fc3853dd1643a2793afe6c7da1d19"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 16 15:40:06 crc kubenswrapper[4816]: I0216 15:40:06.942809 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://d0e2ebe7423c9baf282febac1b199f56648fc3853dd1643a2793afe6c7da1d19" gracePeriod=600
Feb 16 15:40:07 crc kubenswrapper[4816]: I0216 15:40:07.203976 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="d0e2ebe7423c9baf282febac1b199f56648fc3853dd1643a2793afe6c7da1d19" exitCode=0
Feb 16 15:40:07 crc kubenswrapper[4816]: I0216 15:40:07.204025 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"d0e2ebe7423c9baf282febac1b199f56648fc3853dd1643a2793afe6c7da1d19"}
Feb 16 15:40:07 crc kubenswrapper[4816]: I0216 15:40:07.204442 4816 scope.go:117] "RemoveContainer" containerID="59a351906a47dbff4d563841c3babecaaa13d693d63bec6e7795f49515ddfa64"
Feb 16 15:40:08 crc kubenswrapper[4816]: I0216 15:40:08.222374 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca"}
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.844504 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g9qf7"]
Feb 16 15:41:07 crc kubenswrapper[4816]: E0216 15:41:07.845677 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="extract-utilities"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.845704 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="extract-utilities"
Feb 16 15:41:07 crc kubenswrapper[4816]: E0216 15:41:07.845722 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="extract-utilities"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.845729 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="extract-utilities"
Feb 16 15:41:07 crc kubenswrapper[4816]: E0216 15:41:07.845745 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="extract-content"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.845753 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="extract-content"
Feb 16 15:41:07 crc kubenswrapper[4816]: E0216 15:41:07.845782 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="registry-server"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.845790 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="registry-server"
Feb 16 15:41:07 crc kubenswrapper[4816]: E0216 15:41:07.845818 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="extract-content"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.845827 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="extract-content"
Feb 16 15:41:07 crc kubenswrapper[4816]: E0216 15:41:07.845865 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="registry-server"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.845876 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="registry-server"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.846125 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="11e71426-4407-4194-8787-b80d07aa81a5" containerName="registry-server"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.846158 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="52961e17-f395-49a1-9e04-57e2dec35a9c" containerName="registry-server"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.848189 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.866235 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9qf7"]
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.947308 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-utilities\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.947389 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-catalog-content\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:07 crc kubenswrapper[4816]: I0216 15:41:07.947457 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwzch\" (UniqueName: \"kubernetes.io/projected/898a6563-2c99-4227-aada-f51751eeb62c-kube-api-access-pwzch\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.050117 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-utilities\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.050463 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-catalog-content\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.050627 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwzch\" (UniqueName: \"kubernetes.io/projected/898a6563-2c99-4227-aada-f51751eeb62c-kube-api-access-pwzch\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.050852 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-utilities\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.051149 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-catalog-content\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.079691 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwzch\" (UniqueName: \"kubernetes.io/projected/898a6563-2c99-4227-aada-f51751eeb62c-kube-api-access-pwzch\") pod \"redhat-marketplace-g9qf7\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") " pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.195426 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.705275 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9qf7"]
Feb 16 15:41:08 crc kubenswrapper[4816]: W0216 15:41:08.728857 4816 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod898a6563_2c99_4227_aada_f51751eeb62c.slice/crio-ed7efa3020892d9be054df6050e684b31d43faf40b4e4f51b4d85200fce0806e WatchSource:0}: Error finding container ed7efa3020892d9be054df6050e684b31d43faf40b4e4f51b4d85200fce0806e: Status 404 returned error can't find the container with id ed7efa3020892d9be054df6050e684b31d43faf40b4e4f51b4d85200fce0806e
Feb 16 15:41:08 crc kubenswrapper[4816]: I0216 15:41:08.952893 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9qf7" event={"ID":"898a6563-2c99-4227-aada-f51751eeb62c","Type":"ContainerStarted","Data":"ed7efa3020892d9be054df6050e684b31d43faf40b4e4f51b4d85200fce0806e"}
Feb 16 15:41:09 crc kubenswrapper[4816]: I0216 15:41:09.968109 4816 generic.go:334] "Generic (PLEG): container finished" podID="898a6563-2c99-4227-aada-f51751eeb62c" containerID="084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10" exitCode=0
Feb 16 15:41:09 crc kubenswrapper[4816]: I0216 15:41:09.968206 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9qf7" event={"ID":"898a6563-2c99-4227-aada-f51751eeb62c","Type":"ContainerDied","Data":"084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10"}
Feb 16 15:41:12 crc kubenswrapper[4816]: I0216 15:41:12.598341 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9qf7" event={"ID":"898a6563-2c99-4227-aada-f51751eeb62c","Type":"ContainerStarted","Data":"b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5"}
Feb 16 15:41:13 crc kubenswrapper[4816]: I0216 15:41:13.609045 4816 generic.go:334] "Generic (PLEG): container finished" podID="898a6563-2c99-4227-aada-f51751eeb62c" containerID="b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5" exitCode=0
Feb 16 15:41:13 crc kubenswrapper[4816]: I0216 15:41:13.609089 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9qf7" event={"ID":"898a6563-2c99-4227-aada-f51751eeb62c","Type":"ContainerDied","Data":"b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5"}
Feb 16 15:41:15 crc kubenswrapper[4816]: I0216 15:41:15.636874 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9qf7" event={"ID":"898a6563-2c99-4227-aada-f51751eeb62c","Type":"ContainerStarted","Data":"fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7"}
Feb 16 15:41:15 crc kubenswrapper[4816]: I0216 15:41:15.666909 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g9qf7" podStartSLOduration=4.665282218 podStartE2EDuration="8.666844018s" podCreationTimestamp="2026-02-16 15:41:07 +0000 UTC" firstStartedPulling="2026-02-16 15:41:09.970681469 +0000 UTC m=+9469.297395217" lastFinishedPulling="2026-02-16 15:41:13.972243269 +0000 UTC m=+9473.298957017" observedRunningTime="2026-02-16 15:41:15.656249069 +0000 UTC m=+9474.982962817" watchObservedRunningTime="2026-02-16 15:41:15.666844018 +0000 UTC m=+9474.993557746"
Feb 16 15:41:18 crc kubenswrapper[4816]: I0216 15:41:18.196476 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:18 crc kubenswrapper[4816]: I0216 15:41:18.198095 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:18 crc kubenswrapper[4816]: I0216 15:41:18.266464 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:19 crc kubenswrapper[4816]: I0216 15:41:19.736633 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:19 crc kubenswrapper[4816]: I0216 15:41:19.804847 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9qf7"]
Feb 16 15:41:21 crc kubenswrapper[4816]: I0216 15:41:21.701004 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g9qf7" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="registry-server" containerID="cri-o://fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7" gracePeriod=2
Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.301460 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9qf7"
Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.403932 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-catalog-content\") pod \"898a6563-2c99-4227-aada-f51751eeb62c\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") "
Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.403967 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwzch\" (UniqueName: \"kubernetes.io/projected/898a6563-2c99-4227-aada-f51751eeb62c-kube-api-access-pwzch\") pod \"898a6563-2c99-4227-aada-f51751eeb62c\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") "
Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.404047 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-utilities\") pod \"898a6563-2c99-4227-aada-f51751eeb62c\" (UID: \"898a6563-2c99-4227-aada-f51751eeb62c\") "
Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.405724 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-utilities" (OuterVolumeSpecName: "utilities") pod "898a6563-2c99-4227-aada-f51751eeb62c" (UID: "898a6563-2c99-4227-aada-f51751eeb62c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.409800 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/898a6563-2c99-4227-aada-f51751eeb62c-kube-api-access-pwzch" (OuterVolumeSpecName: "kube-api-access-pwzch") pod "898a6563-2c99-4227-aada-f51751eeb62c" (UID: "898a6563-2c99-4227-aada-f51751eeb62c"). InnerVolumeSpecName "kube-api-access-pwzch". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.437243 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "898a6563-2c99-4227-aada-f51751eeb62c" (UID: "898a6563-2c99-4227-aada-f51751eeb62c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.507433 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.507472 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/898a6563-2c99-4227-aada-f51751eeb62c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.507508 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwzch\" (UniqueName: \"kubernetes.io/projected/898a6563-2c99-4227-aada-f51751eeb62c-kube-api-access-pwzch\") on node \"crc\" DevicePath \"\"" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.715043 4816 generic.go:334] "Generic (PLEG): container finished" podID="898a6563-2c99-4227-aada-f51751eeb62c" containerID="fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7" exitCode=0 Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.715103 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9qf7" event={"ID":"898a6563-2c99-4227-aada-f51751eeb62c","Type":"ContainerDied","Data":"fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7"} Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.715131 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9qf7" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.715170 4816 scope.go:117] "RemoveContainer" containerID="fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.715150 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9qf7" event={"ID":"898a6563-2c99-4227-aada-f51751eeb62c","Type":"ContainerDied","Data":"ed7efa3020892d9be054df6050e684b31d43faf40b4e4f51b4d85200fce0806e"} Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.748315 4816 scope.go:117] "RemoveContainer" containerID="b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.777742 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9qf7"] Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.784178 4816 scope.go:117] "RemoveContainer" containerID="084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.790964 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9qf7"] Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.807089 4816 scope.go:117] "RemoveContainer" containerID="fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7" Feb 16 15:41:22 crc kubenswrapper[4816]: E0216 15:41:22.807636 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7\": container with ID starting with fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7 not found: ID does not exist" containerID="fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.807727 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7"} err="failed to get container status \"fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7\": rpc error: code = NotFound desc = could not find container \"fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7\": container with ID starting with fed61c6411132263d6ad454595f9d3471236d138defcd39df2bc8d79484538d7 not found: ID does not exist" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.807774 4816 scope.go:117] "RemoveContainer" containerID="b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5" Feb 16 15:41:22 crc kubenswrapper[4816]: E0216 15:41:22.808326 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5\": container with ID starting with b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5 not found: ID does not exist" containerID="b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.808416 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5"} err="failed to get container status \"b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5\": rpc error: code = NotFound desc = could not find 
container \"b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5\": container with ID starting with b9be9336d66fe6c981909687f2928795c83333c9c676954dd28be036bbb3d3a5 not found: ID does not exist" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.808493 4816 scope.go:117] "RemoveContainer" containerID="084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10" Feb 16 15:41:22 crc kubenswrapper[4816]: E0216 15:41:22.808849 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10\": container with ID starting with 084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10 not found: ID does not exist" containerID="084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10" Feb 16 15:41:22 crc kubenswrapper[4816]: I0216 15:41:22.808944 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10"} err="failed to get container status \"084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10\": rpc error: code = NotFound desc = could not find container \"084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10\": container with ID starting with 084fd919aa8ca11618e237ccd93b07e52da9fc38e193549c284469ae07408f10 not found: ID does not exist" Feb 16 15:41:23 crc kubenswrapper[4816]: I0216 15:41:23.414486 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="898a6563-2c99-4227-aada-f51751eeb62c" path="/var/lib/kubelet/pods/898a6563-2c99-4227-aada-f51751eeb62c/volumes" Feb 16 15:41:49 crc kubenswrapper[4816]: I0216 15:41:49.550975 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_01d77692-9dbe-4e2c-8b09-f7ce7a86efec/init-config-reloader/0.log" Feb 16 15:41:49 crc kubenswrapper[4816]: I0216 15:41:49.766459 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_01d77692-9dbe-4e2c-8b09-f7ce7a86efec/alertmanager/0.log" Feb 16 15:41:49 crc kubenswrapper[4816]: I0216 15:41:49.852992 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_01d77692-9dbe-4e2c-8b09-f7ce7a86efec/init-config-reloader/0.log" Feb 16 15:41:49 crc kubenswrapper[4816]: I0216 15:41:49.895944 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_01d77692-9dbe-4e2c-8b09-f7ce7a86efec/config-reloader/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.475933 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_7f425268-34f3-4db6-a4e7-b806242b8264/aodh-api/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.525979 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_7f425268-34f3-4db6-a4e7-b806242b8264/aodh-listener/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.551635 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_7f425268-34f3-4db6-a4e7-b806242b8264/aodh-evaluator/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.563766 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_7f425268-34f3-4db6-a4e7-b806242b8264/aodh-notifier/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.744398 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-api-64684f6b78-j5wzl_51985c5a-afd8-4005-a494-403d673f7b2b/barbican-api/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.773909 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64684f6b78-j5wzl_51985c5a-afd8-4005-a494-403d673f7b2b/barbican-api-log/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.934734 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c76b849c8-6wvld_f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b/barbican-keystone-listener/0.log" Feb 16 15:41:50 crc kubenswrapper[4816]: I0216 15:41:50.988218 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-c76b849c8-6wvld_f8af86d5-7436-4e20-b9b0-9b39f9f2bd6b/barbican-keystone-listener-log/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.093511 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-68897db679-vhq8b_8fb56667-c039-4934-9c5e-6d4a740b0a7e/barbican-worker/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.131844 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-68897db679-vhq8b_8fb56667-c039-4934-9c5e-6d4a740b0a7e/barbican-worker-log/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.262300 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-kdxx4_6618881a-03b6-4eec-b4e5-850bbce3a0ce/bootstrap-openstack-openstack-cell1/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.366720 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_720b8c85-0246-4259-9e80-35a4fd4c7242/ceilometer-central-agent/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.449449 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_720b8c85-0246-4259-9e80-35a4fd4c7242/ceilometer-notification-agent/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.468816 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_720b8c85-0246-4259-9e80-35a4fd4c7242/proxy-httpd/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.539983 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_720b8c85-0246-4259-9e80-35a4fd4c7242/sg-core/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.660007 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-pkfsl_1cdc4b89-7e53-4a42-8d37-afbf8b153f9a/ceph-client-openstack-openstack-cell1/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.864981 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0135ee69-6313-403e-97f7-7675511cc726/cinder-api/0.log" Feb 16 15:41:51 crc kubenswrapper[4816]: I0216 15:41:51.876620 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0135ee69-6313-403e-97f7-7675511cc726/cinder-api-log/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.199888 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_e0a0b2de-affb-4954-bb88-fbc263a54b06/probe/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.203033 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_e0a0b2de-affb-4954-bb88-fbc263a54b06/cinder-backup/0.log" Feb 16 15:41:52 crc 
kubenswrapper[4816]: I0216 15:41:52.310487 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fd608722-b67c-4a82-a808-68519c126126/cinder-scheduler/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.472615 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fd608722-b67c-4a82-a808-68519c126126/probe/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.610742 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_7c0c2726-3681-43b2-8697-230ab9f116c3/probe/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.641358 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_7c0c2726-3681-43b2-8697-230ab9f116c3/cinder-volume/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.769479 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-xk7gv_e961c0a2-5b1d-4f14-975f-ec640a801439/configure-network-openstack-openstack-cell1/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.838150 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-vwcgn_34da2db9-996b-4f0a-af6b-3b230f5b8a0a/configure-os-openstack-openstack-cell1/0.log" Feb 16 15:41:52 crc kubenswrapper[4816]: I0216 15:41:52.938005 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5644db77f7-8gd7t_4c717d50-6be2-467f-b637-873c00617e0e/init/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.260469 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5644db77f7-8gd7t_4c717d50-6be2-467f-b637-873c00617e0e/init/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.266358 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5644db77f7-8gd7t_4c717d50-6be2-467f-b637-873c00617e0e/dnsmasq-dns/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.274373 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-gbsjp_ce44ed35-23a8-40ad-ac4a-4925f19d14ec/download-cache-openstack-openstack-cell1/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.462582 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_8dc52345-93e6-4c65-9617-f4dc9bdd5871/glance-log/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.479038 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_8dc52345-93e6-4c65-9617-f4dc9bdd5871/glance-httpd/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.525964 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_eea609be-3e59-43a4-8a67-5c3f5a427489/glance-log/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.539520 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_eea609be-3e59-43a4-8a67-5c3f5a427489/glance-httpd/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.788616 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-6f897687c8-lnh6w_6bae665e-4901-41cf-bc7e-2b47de9b6429/heat-api/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.891532 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_heat-cfnapi-5cfbdfb845-kzxzs_a1efa856-88c3-4311-9634-7370c2a2db47/heat-cfnapi/0.log" Feb 16 15:41:53 crc kubenswrapper[4816]: I0216 15:41:53.946310 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-67fc45c8f8-s7bjc_fcfe17aa-f8a3-46c2-9e93-6b9b147e415a/heat-engine/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.109020 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-76546b4c8f-lxsz6_545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554/horizon/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.122311 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-76546b4c8f-lxsz6_545b9e47-f0dc-4ca5-9b8e-c4ff7b53e554/horizon-log/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.173881 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-bfqhj_e7eb1a12-445f-49cf-9958-d9cdccd07352/install-certs-openstack-openstack-cell1/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.317051 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-24tb8_78140e20-6208-46cc-a7a5-a64aa3d1dee2/install-os-openstack-openstack-cell1/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.580109 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-c89c9568d-8xkdd_0b6512c2-dc55-4f1d-baf1-d1e901f90a07/keystone-api/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.631014 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29520901-wgdcv_4345a6a3-62a1-40c1-a611-37289fe170fd/keystone-cron/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.679541 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_7e296da6-30fd-4fea-8dd5-c36394ca1c1c/kube-state-metrics/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.768857 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-mjw59_d16b3ac3-df67-4ab9-9585-e121d8aababc/libvirt-openstack-openstack-cell1/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.972194 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff/manila-api-log/0.log" Feb 16 15:41:54 crc kubenswrapper[4816]: I0216 15:41:54.991537 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_9580fee4-36fc-4e7b-8c12-bdaa7b8b40ff/manila-api/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.087187 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_9f46265c-6ed1-44e2-b935-ed26dda53d9d/manila-scheduler/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.237839 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_c993b595-71ab-4e33-83ea-3c1954491d41/manila-share/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.257230 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_9f46265c-6ed1-44e2-b935-ed26dda53d9d/probe/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.270917 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_c993b595-71ab-4e33-83ea-3c1954491d41/probe/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.587060 4816 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-645bb559fc-bxrrg_dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e/neutron-api/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.628675 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-645bb559fc-bxrrg_dc3b9126-7c70-4e95-9f82-0b0d37fa7e1e/neutron-httpd/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.837506 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-gvbjt_4c98be45-7f99-41f4-93dd-f57aa565492f/neutron-dhcp-openstack-openstack-cell1/0.log" Feb 16 15:41:55 crc kubenswrapper[4816]: I0216 15:41:55.879593 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-tggb5_488534f7-df72-4134-8aef-f7812bb0a497/neutron-metadata-openstack-openstack-cell1/0.log" Feb 16 15:41:56 crc kubenswrapper[4816]: I0216 15:41:56.072344 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-pzqvv_77aba700-c238-4ca0-94cf-f596d763e1a2/neutron-sriov-openstack-openstack-cell1/0.log" Feb 16 15:41:56 crc kubenswrapper[4816]: I0216 15:41:56.203864 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_28fcfa43-069c-4810-9362-611b963f3fca/nova-api-api/0.log" Feb 16 15:41:56 crc kubenswrapper[4816]: I0216 15:41:56.453631 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_28fcfa43-069c-4810-9362-611b963f3fca/nova-api-log/0.log" Feb 16 15:41:56 crc kubenswrapper[4816]: I0216 15:41:56.479606 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ceb6d00c-cca7-4f36-b688-356cc8bc83ab/nova-cell0-conductor-conductor/0.log" Feb 16 15:41:57 crc kubenswrapper[4816]: I0216 15:41:57.005842 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_be2b417b-fe2d-4d12-8c42-13fa7587f1fa/nova-cell1-conductor-conductor/0.log" Feb 16 15:41:57 crc kubenswrapper[4816]: I0216 15:41:57.008321 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_1e9fd6ce-2013-406b-a1a9-b9c948f0dca7/nova-cell1-novncproxy-novncproxy/0.log" Feb 16 15:41:57 crc kubenswrapper[4816]: I0216 15:41:57.240396 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celln4bgn_600c3f65-ad0a-41fe-9fe8-8cc2870bdf9f/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Feb 16 15:41:57 crc kubenswrapper[4816]: I0216 15:41:57.276632 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-sxj25_17ce2002-16df-42d4-b9c3-e9bc15ad020f/nova-cell1-openstack-openstack-cell1/0.log" Feb 16 15:41:57 crc kubenswrapper[4816]: I0216 15:41:57.491876 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5d4b4085-0c30-40ad-9941-241c956955b7/nova-metadata-log/0.log" Feb 16 15:41:57 crc kubenswrapper[4816]: I0216 15:41:57.575016 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5d4b4085-0c30-40ad-9941-241c956955b7/nova-metadata-metadata/0.log" Feb 16 15:41:57 crc kubenswrapper[4816]: I0216 15:41:57.729794 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_5f7413d4-1b7b-484f-9b5d-a615a36bc1a2/nova-scheduler-scheduler/0.log" Feb 16 15:41:58 crc 
kubenswrapper[4816]: I0216 15:41:58.256016 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7859468b99-2chct_3f4eb8fe-9b0e-440f-9013-9af01736386f/init/0.log" Feb 16 15:41:58 crc kubenswrapper[4816]: I0216 15:41:58.491864 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7859468b99-2chct_3f4eb8fe-9b0e-440f-9013-9af01736386f/init/0.log" Feb 16 15:41:58 crc kubenswrapper[4816]: I0216 15:41:58.642251 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7859468b99-2chct_3f4eb8fe-9b0e-440f-9013-9af01736386f/octavia-api-provider-agent/0.log" Feb 16 15:41:58 crc kubenswrapper[4816]: I0216 15:41:58.701880 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-8vhbw_279f09b2-16a7-4be2-a83b-dba7b3794fd3/init/0.log" Feb 16 15:41:58 crc kubenswrapper[4816]: I0216 15:41:58.769979 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-7859468b99-2chct_3f4eb8fe-9b0e-440f-9013-9af01736386f/octavia-api/0.log" Feb 16 15:41:58 crc kubenswrapper[4816]: I0216 15:41:58.885905 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-8vhbw_279f09b2-16a7-4be2-a83b-dba7b3794fd3/init/0.log" Feb 16 15:41:58 crc kubenswrapper[4816]: I0216 15:41:58.997128 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-4xd2x_4e86a794-696e-45be-b7fc-5a9df7c3bab5/init/0.log" Feb 16 15:41:59 crc kubenswrapper[4816]: I0216 15:41:59.004927 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-8vhbw_279f09b2-16a7-4be2-a83b-dba7b3794fd3/octavia-healthmanager/0.log" Feb 16 15:41:59 crc kubenswrapper[4816]: I0216 15:41:59.158743 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-4xd2x_4e86a794-696e-45be-b7fc-5a9df7c3bab5/init/0.log" Feb 16 15:41:59 crc kubenswrapper[4816]: I0216 15:41:59.240451 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-4xd2x_4e86a794-696e-45be-b7fc-5a9df7c3bab5/octavia-housekeeping/0.log" Feb 16 15:41:59 crc kubenswrapper[4816]: I0216 15:41:59.292527 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-g6rrq_cf336b82-22b2-4f4b-a120-d0141487192f/init/0.log" Feb 16 15:41:59 crc kubenswrapper[4816]: I0216 15:41:59.455966 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-g6rrq_cf336b82-22b2-4f4b-a120-d0141487192f/octavia-amphora-httpd/0.log" Feb 16 15:41:59 crc kubenswrapper[4816]: I0216 15:41:59.460506 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-g6rrq_cf336b82-22b2-4f4b-a120-d0141487192f/init/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.025819 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-2brcl_dda86147-89af-40b9-ad51-1d952a483747/init/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.238291 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-2brcl_dda86147-89af-40b9-ad51-1d952a483747/init/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.323117 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-2brcl_dda86147-89af-40b9-ad51-1d952a483747/octavia-rsyslog/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 
15:42:00.342572 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-w4cm6_9982c371-5ece-4660-bc85-25e726887e29/init/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.513454 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-w4cm6_9982c371-5ece-4660-bc85-25e726887e29/init/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.637755 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2152ebde-94a5-401a-a9c0-f2ca76c5a16e/mysql-bootstrap/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.696826 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-w4cm6_9982c371-5ece-4660-bc85-25e726887e29/octavia-worker/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.848559 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2152ebde-94a5-401a-a9c0-f2ca76c5a16e/mysql-bootstrap/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.881811 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_2152ebde-94a5-401a-a9c0-f2ca76c5a16e/galera/0.log" Feb 16 15:42:00 crc kubenswrapper[4816]: I0216 15:42:00.953834 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8fbda533-421c-4e67-8f65-4970f0c27924/mysql-bootstrap/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.120575 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8fbda533-421c-4e67-8f65-4970f0c27924/mysql-bootstrap/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.183472 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8fbda533-421c-4e67-8f65-4970f0c27924/galera/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.224357 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_689b4789-2ea7-46da-930f-bf92141f0845/openstackclient/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.371251 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lt54b_ded75e4e-2e4e-487c-a78d-1029edcba7e6/ovn-controller/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.508164 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tb2n8_20fdd2c1-4a44-4d6f-ae14-f11d556f35f7/openstack-network-exporter/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.632764 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-v95mf_9a78f714-cb33-4f68-a282-ab390b744153/ovsdb-server-init/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.807490 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-v95mf_9a78f714-cb33-4f68-a282-ab390b744153/ovsdb-server/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.812510 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-v95mf_9a78f714-cb33-4f68-a282-ab390b744153/ovs-vswitchd/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.820449 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-v95mf_9a78f714-cb33-4f68-a282-ab390b744153/ovsdb-server-init/0.log" Feb 16 15:42:01 crc kubenswrapper[4816]: I0216 15:42:01.992537 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-northd-0_38d8bf2c-034a-4755-a90e-8bb4c0d7d55c/ovn-northd/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.044579 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_38d8bf2c-034a-4755-a90e-8bb4c0d7d55c/openstack-network-exporter/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.237302 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-smvzm_1fdfbfb5-72fa-43c9-ab7c-e824328f5a36/ovn-openstack-openstack-cell1/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.266077 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_1662531d-5c69-4b0f-a95b-008fb425954c/openstack-network-exporter/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.347135 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_1662531d-5c69-4b0f-a95b-008fb425954c/ovsdbserver-nb/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.460908 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_05759484-3ff6-46f6-9aa8-7080fbcdaed2/openstack-network-exporter/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.533956 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_05759484-3ff6-46f6-9aa8-7080fbcdaed2/ovsdbserver-nb/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.648426 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_b0f55e11-6d2f-4148-af12-86568fca039d/openstack-network-exporter/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.826409 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_b0f55e11-6d2f-4148-af12-86568fca039d/ovsdbserver-nb/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.864088 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3d5a108c-cf1e-4484-be11-3f5cb31607df/openstack-network-exporter/0.log" Feb 16 15:42:02 crc kubenswrapper[4816]: I0216 15:42:02.942760 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_3d5a108c-cf1e-4484-be11-3f5cb31607df/ovsdbserver-sb/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.043244 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_2cea967e-94b6-4e50-bab8-e354ae2c67cf/openstack-network-exporter/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.106432 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_2cea967e-94b6-4e50-bab8-e354ae2c67cf/ovsdbserver-sb/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.284534 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_d0882ccc-80ad-4170-92d6-bf10c5c45980/openstack-network-exporter/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.381283 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_d0882ccc-80ad-4170-92d6-bf10c5c45980/ovsdbserver-sb/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.603896 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5dc688cbd-sxhbw_1ebfe76f-1f9d-416f-bc50-190f93955818/placement-api/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.612411 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cnffsp_37cb37ad-8a44-4fad-bee1-b2fad177b667/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.648062 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5dc688cbd-sxhbw_1ebfe76f-1f9d-416f-bc50-190f93955818/placement-log/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.815680 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e5cfee9b-cc72-4751-8c27-39c3ab9c3c96/init-config-reloader/0.log" Feb 16 15:42:03 crc kubenswrapper[4816]: I0216 15:42:03.964296 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e5cfee9b-cc72-4751-8c27-39c3ab9c3c96/init-config-reloader/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.030401 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e5cfee9b-cc72-4751-8c27-39c3ab9c3c96/config-reloader/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.070399 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e5cfee9b-cc72-4751-8c27-39c3ab9c3c96/thanos-sidecar/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.088010 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_e5cfee9b-cc72-4751-8c27-39c3ab9c3c96/prometheus/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.277489 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6cd70d46-7198-421e-8082-95af01516a75/setup-container/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.459107 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6cd70d46-7198-421e-8082-95af01516a75/setup-container/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.492743 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6cd70d46-7198-421e-8082-95af01516a75/rabbitmq/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.506994 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_463a765f-a9c4-41c4-8198-4852beabb6df/setup-container/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.648483 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_463a765f-a9c4-41c4-8198-4852beabb6df/setup-container/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.744422 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_463a765f-a9c4-41c4-8198-4852beabb6df/rabbitmq/0.log" Feb 16 15:42:04 crc kubenswrapper[4816]: I0216 15:42:04.955405 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-mgsws_2abfbb74-d7a6-46b1-937f-4a9a1f882215/reboot-os-openstack-openstack-cell1/0.log" Feb 16 15:42:05 crc kubenswrapper[4816]: I0216 15:42:05.040990 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-q4txh_6452ac39-25d8-4cf1-8b77-9fb9fc9ffb10/run-os-openstack-openstack-cell1/0.log" Feb 16 15:42:05 crc kubenswrapper[4816]: I0216 15:42:05.260229 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ssh-known-hosts-openstack-wx6jl_0ecb519a-94e1-4adb-8356-c9836c9673ab/ssh-known-hosts-openstack/0.log" Feb 16 15:42:05 crc kubenswrapper[4816]: I0216 15:42:05.377999 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_16de8506-0f12-4daa-84b8-3cdd03d266a8/memcached/0.log" Feb 16 15:42:05 crc kubenswrapper[4816]: I0216 15:42:05.407192 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-xnj4h_d247672c-f503-4d85-a33e-b01084c23db5/telemetry-openstack-openstack-cell1/0.log" Feb 16 15:42:05 crc kubenswrapper[4816]: I0216 15:42:05.511795 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-bl8p4_34f724c7-a493-4b35-8d7a-ae2ebb52353d/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Feb 16 15:42:05 crc kubenswrapper[4816]: I0216 15:42:05.567875 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-fbgc6_6edf3408-6112-4a18-9c9c-2aebc344f6b3/validate-network-openstack-openstack-cell1/0.log" Feb 16 15:42:28 crc kubenswrapper[4816]: I0216 15:42:28.775474 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns_cf420361-538f-4de6-8e0b-09bda2ae4d4c/util/0.log" Feb 16 15:42:28 crc kubenswrapper[4816]: I0216 15:42:28.999920 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns_cf420361-538f-4de6-8e0b-09bda2ae4d4c/util/0.log" Feb 16 15:42:29 crc kubenswrapper[4816]: I0216 15:42:29.016235 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns_cf420361-538f-4de6-8e0b-09bda2ae4d4c/pull/0.log" Feb 16 15:42:29 crc kubenswrapper[4816]: I0216 15:42:29.018942 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns_cf420361-538f-4de6-8e0b-09bda2ae4d4c/pull/0.log" Feb 16 15:42:29 crc kubenswrapper[4816]: I0216 15:42:29.159031 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns_cf420361-538f-4de6-8e0b-09bda2ae4d4c/util/0.log" Feb 16 15:42:29 crc kubenswrapper[4816]: I0216 15:42:29.201632 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns_cf420361-538f-4de6-8e0b-09bda2ae4d4c/pull/0.log" Feb 16 15:42:29 crc kubenswrapper[4816]: I0216 15:42:29.205539 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_839821d02b67fa352b5f2f2742cf71374a58067197cd468c715f3fd4e7vb6ns_cf420361-538f-4de6-8e0b-09bda2ae4d4c/extract/0.log" Feb 16 15:42:30 crc kubenswrapper[4816]: I0216 15:42:30.247629 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d8bf5c495-vpfg8_509907ed-a471-4584-b564-a281e4ef6d72/manager/0.log" Feb 16 15:42:30 crc kubenswrapper[4816]: I0216 15:42:30.822838 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-868647ff47-f8lx7_04a6d824-7601-4884-9b45-4a9d7b5154af/manager/0.log" Feb 16 15:42:30 crc kubenswrapper[4816]: I0216 15:42:30.922714 4816 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987464f4-2qsmj_23786cf0-20d5-45c1-8081-3b0e7ac9fd1a/manager/0.log" Feb 16 15:42:31 crc kubenswrapper[4816]: I0216 15:42:31.021294 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69f49c598c-pxnx2_b6606fb3-5f90-403d-9730-10fa7c420e51/manager/0.log" Feb 16 15:42:31 crc kubenswrapper[4816]: I0216 15:42:31.160592 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5b9b8895d5-7jg9h_12ae487a-61f4-46f6-835a-a9beb1b66fc5/manager/0.log" Feb 16 15:42:31 crc kubenswrapper[4816]: I0216 15:42:31.574665 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-554564d7fc-nqxbf_88fcb741-6fee-4112-a1c7-5badac51848a/manager/0.log" Feb 16 15:42:32 crc kubenswrapper[4816]: I0216 15:42:32.021692 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b4d948c87-pxkc2_e0aa9860-8969-41d6-8cdf-c71c2aa4c167/manager/0.log" Feb 16 15:42:32 crc kubenswrapper[4816]: I0216 15:42:32.167765 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-54f6768c69-5fh52_636c0f15-1128-4cca-a9fa-b2e5a58607d4/manager/0.log" Feb 16 15:42:32 crc kubenswrapper[4816]: I0216 15:42:32.240417 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79d975b745-rx9cb_2b1caddd-fa9b-49de-bada-8cbeb89882e7/manager/0.log" Feb 16 15:42:32 crc kubenswrapper[4816]: I0216 15:42:32.437511 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6994f66f48-ssbf6_de3a103e-4d92-4cc1-a7c2-5ab7c14da448/manager/0.log" Feb 16 15:42:32 crc kubenswrapper[4816]: I0216 15:42:32.636435 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64ddbf8bb-5msw2_8773a01d-7f52-46e1-bbaa-92d6d385e41b/manager/0.log" Feb 16 15:42:33 crc kubenswrapper[4816]: I0216 15:42:33.139863 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-567668f5cf-m8ppz_c76b64f0-d1e3-4018-b694-958755f15cbe/manager/0.log" Feb 16 15:42:33 crc kubenswrapper[4816]: I0216 15:42:33.151639 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7c6767dc9cptvrd_ee88d6f1-148e-4a18-ae88-4bdda1df4d65/manager/0.log" Feb 16 15:42:33 crc kubenswrapper[4816]: I0216 15:42:33.301229 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5d946d989d-zgztq_3088a0a0-c2b4-42a5-8411-f966d8abb311/manager/0.log" Feb 16 15:42:33 crc kubenswrapper[4816]: I0216 15:42:33.548071 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-7f746469c7-2pqn2_1633ec9f-6f03-436b-a464-386fefc9ea22/operator/0.log" Feb 16 15:42:33 crc kubenswrapper[4816]: I0216 15:42:33.589836 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-h6rdl_dc1bb7d3-5870-411d-81f5-3aa5ef055420/registry-server/0.log" Feb 16 15:42:33 crc kubenswrapper[4816]: I0216 15:42:33.945979 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-d44cf6b75-8f9p2_0c9bc0d8-0d94-4406-88bb-c4b4e85cdf66/manager/0.log" Feb 16 15:42:34 crc kubenswrapper[4816]: I0216 15:42:34.026691 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-8497b45c89-xt5jt_76800815-dfe9-4b57-beaf-7d9817688213/manager/0.log" Feb 16 15:42:34 crc kubenswrapper[4816]: I0216 15:42:34.175415 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-n7pnd_bce4fbe9-0339-4bef-b723-2ab711bb41df/operator/0.log" Feb 16 15:42:34 crc kubenswrapper[4816]: I0216 15:42:34.244050 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68f46476f-hpwxn_dae97001-293e-4307-8e11-86a9bb275b85/manager/0.log" Feb 16 15:42:34 crc kubenswrapper[4816]: I0216 15:42:34.675696 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-7866795846-xpjn9_7008605c-3daf-47aa-8c93-4f5b58a5c406/manager/0.log" Feb 16 15:42:34 crc kubenswrapper[4816]: I0216 15:42:34.842119 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-7f45b4ff68-v5zp6_e9e1a6a3-c57f-4bd4-b626-a5a2fc9a079e/manager/0.log" Feb 16 15:42:34 crc kubenswrapper[4816]: I0216 15:42:34.949792 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5db88f68c-htckm_b23139ab-59af-4013-8d83-067804821ab2/manager/0.log" Feb 16 15:42:36 crc kubenswrapper[4816]: I0216 15:42:36.940426 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:42:36 crc kubenswrapper[4816]: I0216 15:42:36.940861 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:42:37 crc kubenswrapper[4816]: I0216 15:42:37.205280 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5cd688d8fc-w74kn_bf9c19c6-7076-4c47-872d-92639392fe05/manager/0.log" Feb 16 15:42:37 crc kubenswrapper[4816]: I0216 15:42:37.597560 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69f8888797-4lc7x_d9f341ed-962a-4faa-bdad-ffec10941d95/manager/0.log" Feb 16 15:42:59 crc kubenswrapper[4816]: I0216 15:42:59.187186 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-bcsnm_13129e04-c3d9-4387-bd36-b673b082d90e/control-plane-machine-set-operator/0.log" Feb 16 15:42:59 crc kubenswrapper[4816]: I0216 15:42:59.860645 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9mmht_28235c00-42f1-4935-9b42-c055518c28d3/machine-api-operator/0.log" Feb 16 15:42:59 crc kubenswrapper[4816]: I0216 15:42:59.897329 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9mmht_28235c00-42f1-4935-9b42-c055518c28d3/kube-rbac-proxy/0.log" Feb 16 15:43:06 crc kubenswrapper[4816]: I0216 15:43:06.940492 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:43:06 crc kubenswrapper[4816]: I0216 15:43:06.941168 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.225781 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-whhq6"] Feb 16 15:43:08 crc kubenswrapper[4816]: E0216 15:43:08.226722 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="extract-utilities" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.226750 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="extract-utilities" Feb 16 15:43:08 crc kubenswrapper[4816]: E0216 15:43:08.226770 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="extract-content" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.226798 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="extract-content" Feb 16 15:43:08 crc kubenswrapper[4816]: E0216 15:43:08.226827 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="registry-server" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.226835 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="registry-server" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.227158 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="898a6563-2c99-4227-aada-f51751eeb62c" containerName="registry-server" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.229198 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.242730 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-whhq6"] Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.273100 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-utilities\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.273286 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-catalog-content\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.273471 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm7bz\" (UniqueName: \"kubernetes.io/projected/bd700467-55a7-468b-bd78-3152b0bb3f9b-kube-api-access-cm7bz\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.375523 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-catalog-content\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.375920 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm7bz\" (UniqueName: \"kubernetes.io/projected/bd700467-55a7-468b-bd78-3152b0bb3f9b-kube-api-access-cm7bz\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.375998 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-utilities\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.376247 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-catalog-content\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.376404 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-utilities\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.406234 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-cm7bz\" (UniqueName: \"kubernetes.io/projected/bd700467-55a7-468b-bd78-3152b0bb3f9b-kube-api-access-cm7bz\") pod \"redhat-operators-whhq6\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:08 crc kubenswrapper[4816]: I0216 15:43:08.577288 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:09 crc kubenswrapper[4816]: I0216 15:43:09.100257 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-whhq6"] Feb 16 15:43:09 crc kubenswrapper[4816]: I0216 15:43:09.182389 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whhq6" event={"ID":"bd700467-55a7-468b-bd78-3152b0bb3f9b","Type":"ContainerStarted","Data":"7916fe86732329ddcaef83e5ae659ccd18ba39eb62cfc5e71d4b52e55ace32f3"} Feb 16 15:43:10 crc kubenswrapper[4816]: I0216 15:43:10.801946 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="2152ebde-94a5-401a-a9c0-f2ca76c5a16e" containerName="galera" probeResult="failure" output="command timed out" Feb 16 15:43:11 crc kubenswrapper[4816]: I0216 15:43:11.544768 4816 trace.go:236] Trace[1879310721]: "Calculate volume metrics of ovndbcluster-nb-etc-ovn for pod openstack/ovsdbserver-nb-0" (16-Feb-2026 15:43:09.645) (total time: 1899ms): Feb 16 15:43:11 crc kubenswrapper[4816]: Trace[1879310721]: [1.89935216s] [1.89935216s] END Feb 16 15:43:12 crc kubenswrapper[4816]: I0216 15:43:12.237705 4816 generic.go:334] "Generic (PLEG): container finished" podID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerID="743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2" exitCode=0 Feb 16 15:43:12 crc kubenswrapper[4816]: I0216 15:43:12.237984 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whhq6" event={"ID":"bd700467-55a7-468b-bd78-3152b0bb3f9b","Type":"ContainerDied","Data":"743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2"} Feb 16 15:43:12 crc kubenswrapper[4816]: I0216 15:43:12.242486 4816 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 16 15:43:14 crc kubenswrapper[4816]: I0216 15:43:14.265998 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whhq6" event={"ID":"bd700467-55a7-468b-bd78-3152b0bb3f9b","Type":"ContainerStarted","Data":"43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb"} Feb 16 15:43:15 crc kubenswrapper[4816]: I0216 15:43:15.511300 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-545d4d4674-8z8xq_5dea32e8-76ae-4e83-a2d1-49410b066382/cert-manager-controller/0.log" Feb 16 15:43:15 crc kubenswrapper[4816]: I0216 15:43:15.704353 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-5545bd876-mxfr4_ef08db43-1ed7-4676-9c65-a7de37ad26de/cert-manager-cainjector/0.log" Feb 16 15:43:15 crc kubenswrapper[4816]: I0216 15:43:15.820505 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-6888856db4-p4ghc_f05f1fe5-1b23-4adb-a77f-b2f665050c31/cert-manager-webhook/0.log" Feb 16 15:43:20 crc kubenswrapper[4816]: I0216 15:43:20.335211 4816 generic.go:334] "Generic (PLEG): container finished" podID="bd700467-55a7-468b-bd78-3152b0bb3f9b" 
containerID="43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb" exitCode=0 Feb 16 15:43:20 crc kubenswrapper[4816]: I0216 15:43:20.335294 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whhq6" event={"ID":"bd700467-55a7-468b-bd78-3152b0bb3f9b","Type":"ContainerDied","Data":"43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb"} Feb 16 15:43:21 crc kubenswrapper[4816]: I0216 15:43:21.348306 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whhq6" event={"ID":"bd700467-55a7-468b-bd78-3152b0bb3f9b","Type":"ContainerStarted","Data":"d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e"} Feb 16 15:43:21 crc kubenswrapper[4816]: I0216 15:43:21.378698 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-whhq6" podStartSLOduration=4.820649541 podStartE2EDuration="13.378634874s" podCreationTimestamp="2026-02-16 15:43:08 +0000 UTC" firstStartedPulling="2026-02-16 15:43:12.24091121 +0000 UTC m=+9591.567624938" lastFinishedPulling="2026-02-16 15:43:20.798896543 +0000 UTC m=+9600.125610271" observedRunningTime="2026-02-16 15:43:21.37369463 +0000 UTC m=+9600.700408358" watchObservedRunningTime="2026-02-16 15:43:21.378634874 +0000 UTC m=+9600.705348602" Feb 16 15:43:28 crc kubenswrapper[4816]: I0216 15:43:28.578529 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:28 crc kubenswrapper[4816]: I0216 15:43:28.579088 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:29 crc kubenswrapper[4816]: I0216 15:43:29.630799 4816 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-whhq6" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="registry-server" probeResult="failure" output=< Feb 16 15:43:29 crc kubenswrapper[4816]: timeout: failed to connect service ":50051" within 1s Feb 16 15:43:29 crc kubenswrapper[4816]: > Feb 16 15:43:30 crc kubenswrapper[4816]: I0216 15:43:30.457584 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5c78fc5d65-sl8qx_ea4dc88c-2154-46af-937c-341a1afd226e/nmstate-console-plugin/0.log" Feb 16 15:43:30 crc kubenswrapper[4816]: I0216 15:43:30.605366 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58c85c668d-294cz_acc14b47-c3db-4a65-9f0b-f50acf3d3cb0/kube-rbac-proxy/0.log" Feb 16 15:43:30 crc kubenswrapper[4816]: I0216 15:43:30.634076 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-tc2tm_76133951-996b-4da9-aea3-f2095b86b4c6/nmstate-handler/0.log" Feb 16 15:43:30 crc kubenswrapper[4816]: I0216 15:43:30.686480 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58c85c668d-294cz_acc14b47-c3db-4a65-9f0b-f50acf3d3cb0/nmstate-metrics/0.log" Feb 16 15:43:30 crc kubenswrapper[4816]: I0216 15:43:30.899326 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-694c9596b7-dsvhf_1a2b9a5f-f606-4247-83e8-efaa7185c2fb/nmstate-operator/0.log" Feb 16 15:43:30 crc kubenswrapper[4816]: I0216 15:43:30.911700 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-866bcb46dc-cd8sr_c962989d-280a-449a-aa65-bd2c95cf319f/nmstate-webhook/0.log" Feb 16 15:43:36 crc kubenswrapper[4816]: I0216 15:43:36.940752 4816 patch_prober.go:28] interesting pod/machine-config-daemon-f95nc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 16 15:43:36 crc kubenswrapper[4816]: I0216 15:43:36.941255 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 16 15:43:36 crc kubenswrapper[4816]: I0216 15:43:36.941313 4816 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" Feb 16 15:43:36 crc kubenswrapper[4816]: I0216 15:43:36.942326 4816 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca"} pod="openshift-machine-config-operator/machine-config-daemon-f95nc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 16 15:43:36 crc kubenswrapper[4816]: I0216 15:43:36.942401 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerName="machine-config-daemon" containerID="cri-o://fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" gracePeriod=600 Feb 16 15:43:37 crc kubenswrapper[4816]: E0216 15:43:37.071712 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:43:37 crc kubenswrapper[4816]: I0216 15:43:37.495279 4816 generic.go:334] "Generic (PLEG): container finished" podID="eb19d695-8c09-42cc-bc34-940019ab38dc" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" exitCode=0 Feb 16 15:43:37 crc kubenswrapper[4816]: I0216 15:43:37.495351 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerDied","Data":"fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca"} Feb 16 15:43:37 crc kubenswrapper[4816]: I0216 15:43:37.495404 4816 scope.go:117] "RemoveContainer" containerID="d0e2ebe7423c9baf282febac1b199f56648fc3853dd1643a2793afe6c7da1d19" Feb 16 15:43:37 crc kubenswrapper[4816]: I0216 15:43:37.496100 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:43:37 crc kubenswrapper[4816]: E0216 15:43:37.496354 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:43:38 crc kubenswrapper[4816]: I0216 15:43:38.640603 4816 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:38 crc kubenswrapper[4816]: I0216 15:43:38.710325 4816 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:39 crc kubenswrapper[4816]: I0216 15:43:39.426046 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-whhq6"] Feb 16 15:43:40 crc kubenswrapper[4816]: I0216 15:43:40.524314 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-whhq6" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="registry-server" containerID="cri-o://d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e" gracePeriod=2 Feb 16 15:43:40 crc kubenswrapper[4816]: I0216 15:43:40.982162 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.043840 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-catalog-content\") pod \"bd700467-55a7-468b-bd78-3152b0bb3f9b\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.043898 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cm7bz\" (UniqueName: \"kubernetes.io/projected/bd700467-55a7-468b-bd78-3152b0bb3f9b-kube-api-access-cm7bz\") pod \"bd700467-55a7-468b-bd78-3152b0bb3f9b\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.044145 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-utilities\") pod \"bd700467-55a7-468b-bd78-3152b0bb3f9b\" (UID: \"bd700467-55a7-468b-bd78-3152b0bb3f9b\") " Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.044838 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-utilities" (OuterVolumeSpecName: "utilities") pod "bd700467-55a7-468b-bd78-3152b0bb3f9b" (UID: "bd700467-55a7-468b-bd78-3152b0bb3f9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.054528 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd700467-55a7-468b-bd78-3152b0bb3f9b-kube-api-access-cm7bz" (OuterVolumeSpecName: "kube-api-access-cm7bz") pod "bd700467-55a7-468b-bd78-3152b0bb3f9b" (UID: "bd700467-55a7-468b-bd78-3152b0bb3f9b"). InnerVolumeSpecName "kube-api-access-cm7bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.147115 4816 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-utilities\") on node \"crc\" DevicePath \"\"" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.147402 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cm7bz\" (UniqueName: \"kubernetes.io/projected/bd700467-55a7-468b-bd78-3152b0bb3f9b-kube-api-access-cm7bz\") on node \"crc\" DevicePath \"\"" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.165132 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd700467-55a7-468b-bd78-3152b0bb3f9b" (UID: "bd700467-55a7-468b-bd78-3152b0bb3f9b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.249401 4816 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd700467-55a7-468b-bd78-3152b0bb3f9b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.539873 4816 generic.go:334] "Generic (PLEG): container finished" podID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerID="d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e" exitCode=0 Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.539965 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-whhq6" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.539962 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whhq6" event={"ID":"bd700467-55a7-468b-bd78-3152b0bb3f9b","Type":"ContainerDied","Data":"d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e"} Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.540453 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-whhq6" event={"ID":"bd700467-55a7-468b-bd78-3152b0bb3f9b","Type":"ContainerDied","Data":"7916fe86732329ddcaef83e5ae659ccd18ba39eb62cfc5e71d4b52e55ace32f3"} Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.540475 4816 scope.go:117] "RemoveContainer" containerID="d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.565366 4816 scope.go:117] "RemoveContainer" containerID="43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.567494 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-whhq6"] Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.584027 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-whhq6"] Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.596868 4816 scope.go:117] "RemoveContainer" containerID="743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.643614 4816 scope.go:117] "RemoveContainer" containerID="d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e" Feb 16 15:43:41 crc kubenswrapper[4816]: E0216 15:43:41.644346 4816 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e\": container with ID starting with d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e not found: ID does not exist" containerID="d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.644397 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e"} err="failed to get container status \"d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e\": rpc error: code = NotFound desc = could not find container \"d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e\": container with ID starting with d696198defd3cf44fdf11b8c33a174fee0e8c75835f6254f5aa9321ef7f2634e not found: ID does not exist" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.644428 4816 scope.go:117] "RemoveContainer" containerID="43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb" Feb 16 15:43:41 crc kubenswrapper[4816]: E0216 15:43:41.644851 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb\": container with ID starting with 43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb not found: ID does not exist" containerID="43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.644923 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb"} err="failed to get container status \"43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb\": rpc error: code = NotFound desc = could not find container \"43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb\": container with ID starting with 43d78528cf8e9093d5cdbd9382a397991e573923987fe500e9b161bd73807fbb not found: ID does not exist" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.644965 4816 scope.go:117] "RemoveContainer" containerID="743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2" Feb 16 15:43:41 crc kubenswrapper[4816]: E0216 15:43:41.645332 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2\": container with ID starting with 743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2 not found: ID does not exist" containerID="743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2" Feb 16 15:43:41 crc kubenswrapper[4816]: I0216 15:43:41.645370 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2"} err="failed to get container status \"743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2\": rpc error: code = NotFound desc = could not find container \"743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2\": container with ID starting with 743fbeb00a18fdca12c634faa401d67515d4684f4c81ac230015649b2d5dc0a2 not found: ID does not exist" Feb 16 15:43:43 crc kubenswrapper[4816]: I0216 15:43:43.424111 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" path="/var/lib/kubelet/pods/bd700467-55a7-468b-bd78-3152b0bb3f9b/volumes" Feb 16 15:43:47 crc kubenswrapper[4816]: I0216 15:43:47.210249 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-wtmzg_dfea21df-03cb-4b66-be23-7d06f1036ac6/prometheus-operator/0.log" Feb 16 15:43:47 crc kubenswrapper[4816]: I0216 15:43:47.414133 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7_0eb83954-2f81-4057-bbf7-c10ce7aba9fd/prometheus-operator-admission-webhook/0.log" Feb 16 15:43:47 crc kubenswrapper[4816]: I0216 15:43:47.445889 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm_52fc463e-620e-4f7b-94df-67a832835a06/prometheus-operator-admission-webhook/0.log" Feb 16 15:43:47 crc kubenswrapper[4816]: I0216 15:43:47.610945 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-z8fb9_ccea818a-55d6-46cf-b00b-a888623a16d6/perses-operator/0.log" Feb 16 15:43:47 crc kubenswrapper[4816]: I0216 15:43:47.663003 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-vtzp5_526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b/operator/0.log" Feb 16 15:43:51 crc kubenswrapper[4816]: I0216 15:43:51.406916 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:43:51 crc kubenswrapper[4816]: E0216 15:43:51.407877 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.134952 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-69bbfbf88f-srbbd_8f542a71-53fe-4588-85ed-1c8bffb0b2c2/kube-rbac-proxy/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.393302 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-frr-files/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.596952 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-69bbfbf88f-srbbd_8f542a71-53fe-4588-85ed-1c8bffb0b2c2/controller/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.647126 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-metrics/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.666755 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-reloader/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.691120 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-frr-files/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.778246 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-reloader/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.945674 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-frr-files/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.971781 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-reloader/0.log" Feb 16 15:44:03 crc kubenswrapper[4816]: I0216 15:44:03.985896 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-metrics/0.log" Feb 16 15:44:04 crc kubenswrapper[4816]: I0216 15:44:04.012985 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-metrics/0.log" Feb 16 15:44:04 crc kubenswrapper[4816]: I0216 15:44:04.153086 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-reloader/0.log" Feb 16 15:44:04 crc kubenswrapper[4816]: I0216 15:44:04.158151 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-frr-files/0.log" Feb 16 15:44:04 crc kubenswrapper[4816]: I0216 15:44:04.399097 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:44:04 crc kubenswrapper[4816]: E0216 15:44:04.399504 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.111213 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/controller/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.148209 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/cp-metrics/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.171895 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/frr-metrics/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.364575 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/kube-rbac-proxy/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.399381 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/reloader/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.404286 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/kube-rbac-proxy-frr/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.626434 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-webhook-server-78b44bf5bb-ctqqb_0fa65809-3f66-4677-bec4-82775c83a07f/frr-k8s-webhook-server/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.896988 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5c799b7d57-xrnz7_b88b2649-f69f-4572-9db2-a66e4ac5ec2d/manager/0.log" Feb 16 15:44:05 crc kubenswrapper[4816]: I0216 15:44:05.987324 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-54976bdc59-m2djd_1ce8bd4d-5ddc-43bc-9a7c-bf99ca1f29c1/webhook-server/0.log" Feb 16 15:44:06 crc kubenswrapper[4816]: I0216 15:44:06.263077 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-46t2n_a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4/kube-rbac-proxy/0.log" Feb 16 15:44:07 crc kubenswrapper[4816]: I0216 15:44:07.187800 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-46t2n_a3d90fa9-733d-4cd0-b4c3-5c1c3c7308a4/speaker/0.log" Feb 16 15:44:08 crc kubenswrapper[4816]: I0216 15:44:08.138197 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sl9h9_9b2ea8e1-2288-4080-a20e-637ea18dc35c/frr/0.log" Feb 16 15:44:17 crc kubenswrapper[4816]: I0216 15:44:17.399070 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:44:17 crc kubenswrapper[4816]: E0216 15:44:17.400015 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:44:21 crc kubenswrapper[4816]: I0216 15:44:21.449930 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv_1a1c7127-fb16-4db0-ba07-5a9589c5ace2/util/0.log" Feb 16 15:44:21 crc kubenswrapper[4816]: I0216 15:44:21.634726 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv_1a1c7127-fb16-4db0-ba07-5a9589c5ace2/util/0.log" Feb 16 15:44:21 crc kubenswrapper[4816]: I0216 15:44:21.690366 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv_1a1c7127-fb16-4db0-ba07-5a9589c5ace2/pull/0.log" Feb 16 15:44:21 crc kubenswrapper[4816]: I0216 15:44:21.722183 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv_1a1c7127-fb16-4db0-ba07-5a9589c5ace2/pull/0.log" Feb 16 15:44:21 crc kubenswrapper[4816]: I0216 15:44:21.907016 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv_1a1c7127-fb16-4db0-ba07-5a9589c5ace2/util/0.log" Feb 16 15:44:21 crc kubenswrapper[4816]: I0216 15:44:21.962130 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv_1a1c7127-fb16-4db0-ba07-5a9589c5ace2/extract/0.log" Feb 16 15:44:21 crc kubenswrapper[4816]: 
I0216 15:44:21.968500 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5kd8dv_1a1c7127-fb16-4db0-ba07-5a9589c5ace2/pull/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.129336 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg_f31af8d4-0560-48c4-8471-8fc736b13844/util/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.311485 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg_f31af8d4-0560-48c4-8471-8fc736b13844/util/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.324748 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg_f31af8d4-0560-48c4-8471-8fc736b13844/pull/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.366241 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg_f31af8d4-0560-48c4-8471-8fc736b13844/pull/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.487300 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg_f31af8d4-0560-48c4-8471-8fc736b13844/util/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.494717 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg_f31af8d4-0560-48c4-8471-8fc736b13844/pull/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.500004 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08sdxrg_f31af8d4-0560-48c4-8471-8fc736b13844/extract/0.log" Feb 16 15:44:22 crc kubenswrapper[4816]: I0216 15:44:22.745982 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9_1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1/util/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.112421 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9_1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1/pull/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.128863 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9_1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1/util/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.139540 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9_1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1/pull/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.342796 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9_1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1/util/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.356647 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9_1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1/extract/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.365092 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a9b3ed1fe9273b725119dcfb777257f08e39bbefccdf592dce2d0dc213r9qj9_1bfa1830-5b2c-4d72-9352-7fe1a4ed54f1/pull/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.495110 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9cbbd_286af964-57b7-4758-807c-14cc3d67f1e9/extract-utilities/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.746981 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9cbbd_286af964-57b7-4758-807c-14cc3d67f1e9/extract-content/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.769319 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9cbbd_286af964-57b7-4758-807c-14cc3d67f1e9/extract-utilities/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.790504 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9cbbd_286af964-57b7-4758-807c-14cc3d67f1e9/extract-content/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.975579 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9cbbd_286af964-57b7-4758-807c-14cc3d67f1e9/extract-content/0.log" Feb 16 15:44:23 crc kubenswrapper[4816]: I0216 15:44:23.989571 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9cbbd_286af964-57b7-4758-807c-14cc3d67f1e9/extract-utilities/0.log" Feb 16 15:44:24 crc kubenswrapper[4816]: I0216 15:44:24.229530 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b2x65_13bf1400-800d-4ec5-ad7d-af42faede5b4/extract-utilities/0.log" Feb 16 15:44:24 crc kubenswrapper[4816]: I0216 15:44:24.496973 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b2x65_13bf1400-800d-4ec5-ad7d-af42faede5b4/extract-utilities/0.log" Feb 16 15:44:24 crc kubenswrapper[4816]: I0216 15:44:24.530047 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b2x65_13bf1400-800d-4ec5-ad7d-af42faede5b4/extract-content/0.log" Feb 16 15:44:24 crc kubenswrapper[4816]: I0216 15:44:24.535705 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b2x65_13bf1400-800d-4ec5-ad7d-af42faede5b4/extract-content/0.log" Feb 16 15:44:24 crc kubenswrapper[4816]: I0216 15:44:24.783584 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b2x65_13bf1400-800d-4ec5-ad7d-af42faede5b4/extract-content/0.log" Feb 16 15:44:24 crc kubenswrapper[4816]: I0216 15:44:24.822534 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b2x65_13bf1400-800d-4ec5-ad7d-af42faede5b4/extract-utilities/0.log" Feb 16 15:44:25 crc kubenswrapper[4816]: I0216 15:44:25.254201 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9cbbd_286af964-57b7-4758-807c-14cc3d67f1e9/registry-server/0.log" Feb 16 15:44:25 crc kubenswrapper[4816]: I0216 15:44:25.503706 4816 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef/util/0.log" Feb 16 15:44:25 crc kubenswrapper[4816]: I0216 15:44:25.658267 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef/pull/0.log" Feb 16 15:44:25 crc kubenswrapper[4816]: I0216 15:44:25.730913 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef/util/0.log" Feb 16 15:44:25 crc kubenswrapper[4816]: I0216 15:44:25.737196 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef/pull/0.log" Feb 16 15:44:25 crc kubenswrapper[4816]: I0216 15:44:25.989230 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef/pull/0.log" Feb 16 15:44:25 crc kubenswrapper[4816]: I0216 15:44:25.990317 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b2x65_13bf1400-800d-4ec5-ad7d-af42faede5b4/registry-server/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.011453 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef/util/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.025817 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f938df2ce267491f058ea7e3036e97ee3f65bf3665185b1a4f52323ecah9dgp_ba88b0ce-2a7b-4a6c-9c45-9094f54f17ef/extract/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.169706 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-xx4rl_edc67f7a-d508-4d46-b845-353aadc07314/marketplace-operator/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.185945 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tcn99_2b0e4282-5491-447a-ad48-d13db1ea995d/extract-utilities/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.367415 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tcn99_2b0e4282-5491-447a-ad48-d13db1ea995d/extract-utilities/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.395609 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tcn99_2b0e4282-5491-447a-ad48-d13db1ea995d/extract-content/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.396309 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tcn99_2b0e4282-5491-447a-ad48-d13db1ea995d/extract-content/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.548714 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tcn99_2b0e4282-5491-447a-ad48-d13db1ea995d/extract-utilities/0.log" Feb 16 15:44:26 crc kubenswrapper[4816]: I0216 15:44:26.575549 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-tcn99_2b0e4282-5491-447a-ad48-d13db1ea995d/extract-content/0.log" Feb 16 15:44:27 crc kubenswrapper[4816]: I0216 15:44:27.254945 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rtqdh_f61f8b10-6688-4ecf-a8fb-110be20f0314/extract-utilities/0.log" Feb 16 15:44:27 crc kubenswrapper[4816]: I0216 15:44:27.501464 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tcn99_2b0e4282-5491-447a-ad48-d13db1ea995d/registry-server/0.log" Feb 16 15:44:27 crc kubenswrapper[4816]: I0216 15:44:27.509200 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rtqdh_f61f8b10-6688-4ecf-a8fb-110be20f0314/extract-content/0.log" Feb 16 15:44:27 crc kubenswrapper[4816]: I0216 15:44:27.524098 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rtqdh_f61f8b10-6688-4ecf-a8fb-110be20f0314/extract-utilities/0.log" Feb 16 15:44:27 crc kubenswrapper[4816]: I0216 15:44:27.526048 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rtqdh_f61f8b10-6688-4ecf-a8fb-110be20f0314/extract-content/0.log" Feb 16 15:44:27 crc kubenswrapper[4816]: I0216 15:44:27.766815 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rtqdh_f61f8b10-6688-4ecf-a8fb-110be20f0314/extract-content/0.log" Feb 16 15:44:27 crc kubenswrapper[4816]: I0216 15:44:27.767054 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rtqdh_f61f8b10-6688-4ecf-a8fb-110be20f0314/extract-utilities/0.log" Feb 16 15:44:28 crc kubenswrapper[4816]: I0216 15:44:28.667872 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rtqdh_f61f8b10-6688-4ecf-a8fb-110be20f0314/registry-server/0.log" Feb 16 15:44:32 crc kubenswrapper[4816]: I0216 15:44:32.399165 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:44:32 crc kubenswrapper[4816]: E0216 15:44:32.400468 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:44:42 crc kubenswrapper[4816]: I0216 15:44:42.959063 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79d9b758b-2w5b7_0eb83954-2f81-4057-bbf7-c10ce7aba9fd/prometheus-operator-admission-webhook/0.log" Feb 16 15:44:43 crc kubenswrapper[4816]: I0216 15:44:43.026679 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-79d9b758b-nq7mm_52fc463e-620e-4f7b-94df-67a832835a06/prometheus-operator-admission-webhook/0.log" Feb 16 15:44:43 crc kubenswrapper[4816]: I0216 15:44:43.060448 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-wtmzg_dfea21df-03cb-4b66-be23-7d06f1036ac6/prometheus-operator/0.log" Feb 16 15:44:43 crc kubenswrapper[4816]: I0216 15:44:43.143759 
4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-vtzp5_526d1d8d-e72d-4ef9-a0ca-4b7fd74bd98b/operator/0.log" Feb 16 15:44:43 crc kubenswrapper[4816]: I0216 15:44:43.182843 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-z8fb9_ccea818a-55d6-46cf-b00b-a888623a16d6/perses-operator/0.log" Feb 16 15:44:43 crc kubenswrapper[4816]: I0216 15:44:43.399230 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:44:43 crc kubenswrapper[4816]: E0216 15:44:43.399528 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:44:57 crc kubenswrapper[4816]: I0216 15:44:57.399017 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:44:57 crc kubenswrapper[4816]: E0216 15:44:57.400114 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.166135 4816 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj"] Feb 16 15:45:00 crc kubenswrapper[4816]: E0216 15:45:00.167211 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="extract-utilities" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.167230 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="extract-utilities" Feb 16 15:45:00 crc kubenswrapper[4816]: E0216 15:45:00.167247 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="extract-content" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.167253 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="extract-content" Feb 16 15:45:00 crc kubenswrapper[4816]: E0216 15:45:00.167264 4816 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="registry-server" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.167270 4816 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="registry-server" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.167518 4816 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd700467-55a7-468b-bd78-3152b0bb3f9b" containerName="registry-server" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.168364 4816 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.178400 4816 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.179305 4816 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.184000 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj"] Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.270875 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvn9z\" (UniqueName: \"kubernetes.io/projected/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-kube-api-access-mvn9z\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.271013 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-config-volume\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.271054 4816 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-secret-volume\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.373117 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvn9z\" (UniqueName: \"kubernetes.io/projected/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-kube-api-access-mvn9z\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.373254 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-config-volume\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.373293 4816 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-secret-volume\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.374535 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-config-volume\") pod 
\"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.381321 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-secret-volume\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.394364 4816 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvn9z\" (UniqueName: \"kubernetes.io/projected/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-kube-api-access-mvn9z\") pod \"collect-profiles-29520945-9jxhj\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:00 crc kubenswrapper[4816]: I0216 15:45:00.501939 4816 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:01 crc kubenswrapper[4816]: I0216 15:45:01.093939 4816 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj"] Feb 16 15:45:01 crc kubenswrapper[4816]: I0216 15:45:01.412493 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" event={"ID":"ef8aebd6-9be8-462c-947f-ab4c7af14f7c","Type":"ContainerStarted","Data":"3953f0f80d7ef5d6f51f428d23941e95d60f2432ce00fb3ab253fd1182823e08"} Feb 16 15:45:01 crc kubenswrapper[4816]: I0216 15:45:01.412874 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" event={"ID":"ef8aebd6-9be8-462c-947f-ab4c7af14f7c","Type":"ContainerStarted","Data":"d163f65c616d8d8a3a081ca99481cb1446146a5754e8dcfa349dd3c3ec076922"} Feb 16 15:45:01 crc kubenswrapper[4816]: I0216 15:45:01.458574 4816 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" podStartSLOduration=1.4585560069999999 podStartE2EDuration="1.458556007s" podCreationTimestamp="2026-02-16 15:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-16 15:45:01.43815253 +0000 UTC m=+9700.764866258" watchObservedRunningTime="2026-02-16 15:45:01.458556007 +0000 UTC m=+9700.785269725" Feb 16 15:45:01 crc kubenswrapper[4816]: E0216 15:45:01.634611 4816 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.244:52914->38.129.56.244:45769: write tcp 38.129.56.244:52914->38.129.56.244:45769: write: broken pipe Feb 16 15:45:02 crc kubenswrapper[4816]: I0216 15:45:02.428149 4816 generic.go:334] "Generic (PLEG): container finished" podID="ef8aebd6-9be8-462c-947f-ab4c7af14f7c" containerID="3953f0f80d7ef5d6f51f428d23941e95d60f2432ce00fb3ab253fd1182823e08" exitCode=0 Feb 16 15:45:02 crc kubenswrapper[4816]: I0216 15:45:02.428275 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" 
event={"ID":"ef8aebd6-9be8-462c-947f-ab4c7af14f7c","Type":"ContainerDied","Data":"3953f0f80d7ef5d6f51f428d23941e95d60f2432ce00fb3ab253fd1182823e08"} Feb 16 15:45:03 crc kubenswrapper[4816]: I0216 15:45:03.870289 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.032678 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-config-volume\") pod \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.032836 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvn9z\" (UniqueName: \"kubernetes.io/projected/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-kube-api-access-mvn9z\") pod \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.032873 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-secret-volume\") pod \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\" (UID: \"ef8aebd6-9be8-462c-947f-ab4c7af14f7c\") " Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.034526 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-config-volume" (OuterVolumeSpecName: "config-volume") pod "ef8aebd6-9be8-462c-947f-ab4c7af14f7c" (UID: "ef8aebd6-9be8-462c-947f-ab4c7af14f7c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.040788 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-kube-api-access-mvn9z" (OuterVolumeSpecName: "kube-api-access-mvn9z") pod "ef8aebd6-9be8-462c-947f-ab4c7af14f7c" (UID: "ef8aebd6-9be8-462c-947f-ab4c7af14f7c"). InnerVolumeSpecName "kube-api-access-mvn9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.052224 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ef8aebd6-9be8-462c-947f-ab4c7af14f7c" (UID: "ef8aebd6-9be8-462c-947f-ab4c7af14f7c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.134983 4816 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-config-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.135018 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvn9z\" (UniqueName: \"kubernetes.io/projected/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-kube-api-access-mvn9z\") on node \"crc\" DevicePath \"\"" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.135033 4816 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef8aebd6-9be8-462c-947f-ab4c7af14f7c-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.474506 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" event={"ID":"ef8aebd6-9be8-462c-947f-ab4c7af14f7c","Type":"ContainerDied","Data":"d163f65c616d8d8a3a081ca99481cb1446146a5754e8dcfa349dd3c3ec076922"} Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.474807 4816 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d163f65c616d8d8a3a081ca99481cb1446146a5754e8dcfa349dd3c3ec076922" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.474561 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29520945-9jxhj" Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.512011 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"] Feb 16 15:45:04 crc kubenswrapper[4816]: I0216 15:45:04.525467 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29520900-s4q7t"] Feb 16 15:45:05 crc kubenswrapper[4816]: I0216 15:45:05.420998 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddb6aa19-9826-4b45-96af-ee1f81060e16" path="/var/lib/kubelet/pods/ddb6aa19-9826-4b45-96af-ee1f81060e16/volumes" Feb 16 15:45:08 crc kubenswrapper[4816]: I0216 15:45:08.399244 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:45:08 crc kubenswrapper[4816]: E0216 15:45:08.400044 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:45:23 crc kubenswrapper[4816]: I0216 15:45:23.400343 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:45:23 crc kubenswrapper[4816]: E0216 15:45:23.401754 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:45:34 crc kubenswrapper[4816]: I0216 15:45:34.725007 4816 scope.go:117] "RemoveContainer" containerID="da488c5e1ef030897b774525f88d954a2ec2ae073e905c7d63855ba72afd63a1" Feb 16 15:45:36 crc kubenswrapper[4816]: I0216 15:45:36.400050 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:45:36 crc kubenswrapper[4816]: E0216 15:45:36.401980 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:45:47 crc kubenswrapper[4816]: I0216 15:45:47.399324 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:45:47 crc kubenswrapper[4816]: E0216 15:45:47.400328 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:45:48 crc kubenswrapper[4816]: I0216 15:45:48.852396 4816 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="8fbda533-421c-4e67-8f65-4970f0c27924" containerName="galera" probeResult="failure" output="command timed out" Feb 16 15:45:48 crc kubenswrapper[4816]: I0216 15:45:48.852900 4816 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="8fbda533-421c-4e67-8f65-4970f0c27924" containerName="galera" probeResult="failure" output="command timed out" Feb 16 15:46:02 crc kubenswrapper[4816]: I0216 15:46:02.400042 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:46:02 crc kubenswrapper[4816]: E0216 15:46:02.401305 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:46:02 crc kubenswrapper[4816]: I0216 15:46:02.637877 4816 trace.go:236] Trace[366689944]: "Calculate volume metrics of ovndbcluster-sb-etc-ovn for pod openstack/ovsdbserver-sb-0" (16-Feb-2026 15:45:50.504) (total time: 12133ms): Feb 16 15:46:02 crc kubenswrapper[4816]: Trace[366689944]: [12.133078229s] [12.133078229s] END Feb 16 15:46:02 crc kubenswrapper[4816]: I0216 15:46:02.647273 4816 trace.go:236] Trace[1006529787]: "Calculate volume metrics of ovndbcluster-nb-etc-ovn for pod openstack/ovsdbserver-nb-2" (16-Feb-2026 15:45:51.542) (total time: 11104ms): Feb 16 15:46:02 crc kubenswrapper[4816]: Trace[1006529787]: [11.104968954s] [11.104968954s] END Feb 16 15:46:17 crc 
kubenswrapper[4816]: I0216 15:46:17.399783 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:46:17 crc kubenswrapper[4816]: E0216 15:46:17.401337 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:46:28 crc kubenswrapper[4816]: I0216 15:46:28.400491 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:46:28 crc kubenswrapper[4816]: E0216 15:46:28.401646 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:46:41 crc kubenswrapper[4816]: I0216 15:46:41.411248 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:46:41 crc kubenswrapper[4816]: E0216 15:46:41.412705 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:46:55 crc kubenswrapper[4816]: I0216 15:46:55.398727 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:46:55 crc kubenswrapper[4816]: E0216 15:46:55.399469 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:46:57 crc kubenswrapper[4816]: I0216 15:46:57.331982 4816 generic.go:334] "Generic (PLEG): container finished" podID="58505032-b628-4004-ba48-6cb633d4e3ec" containerID="b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1" exitCode=0 Feb 16 15:46:57 crc kubenswrapper[4816]: I0216 15:46:57.332450 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cmn85/must-gather-2mfgd" event={"ID":"58505032-b628-4004-ba48-6cb633d4e3ec","Type":"ContainerDied","Data":"b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1"} Feb 16 15:46:57 crc kubenswrapper[4816]: I0216 15:46:57.333492 4816 scope.go:117] "RemoveContainer" containerID="b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1" Feb 16 15:46:58 crc kubenswrapper[4816]: I0216 15:46:58.421657 4816 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-cmn85_must-gather-2mfgd_58505032-b628-4004-ba48-6cb633d4e3ec/gather/0.log" Feb 16 15:47:07 crc kubenswrapper[4816]: I0216 15:47:07.733908 4816 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cmn85/must-gather-2mfgd"] Feb 16 15:47:07 crc kubenswrapper[4816]: I0216 15:47:07.734682 4816 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-cmn85/must-gather-2mfgd" podUID="58505032-b628-4004-ba48-6cb633d4e3ec" containerName="copy" containerID="cri-o://f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f" gracePeriod=2 Feb 16 15:47:07 crc kubenswrapper[4816]: I0216 15:47:07.789497 4816 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cmn85/must-gather-2mfgd"] Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.251845 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cmn85_must-gather-2mfgd_58505032-b628-4004-ba48-6cb633d4e3ec/copy/0.log" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.252583 4816 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.317718 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/58505032-b628-4004-ba48-6cb633d4e3ec-must-gather-output\") pod \"58505032-b628-4004-ba48-6cb633d4e3ec\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.325178 4816 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pgdt\" (UniqueName: \"kubernetes.io/projected/58505032-b628-4004-ba48-6cb633d4e3ec-kube-api-access-8pgdt\") pod \"58505032-b628-4004-ba48-6cb633d4e3ec\" (UID: \"58505032-b628-4004-ba48-6cb633d4e3ec\") " Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.331982 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58505032-b628-4004-ba48-6cb633d4e3ec-kube-api-access-8pgdt" (OuterVolumeSpecName: "kube-api-access-8pgdt") pod "58505032-b628-4004-ba48-6cb633d4e3ec" (UID: "58505032-b628-4004-ba48-6cb633d4e3ec"). InnerVolumeSpecName "kube-api-access-8pgdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.428966 4816 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pgdt\" (UniqueName: \"kubernetes.io/projected/58505032-b628-4004-ba48-6cb633d4e3ec-kube-api-access-8pgdt\") on node \"crc\" DevicePath \"\"" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.453543 4816 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cmn85_must-gather-2mfgd_58505032-b628-4004-ba48-6cb633d4e3ec/copy/0.log" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.454257 4816 generic.go:334] "Generic (PLEG): container finished" podID="58505032-b628-4004-ba48-6cb633d4e3ec" containerID="f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f" exitCode=143 Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.454319 4816 scope.go:117] "RemoveContainer" containerID="f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.454469 4816 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cmn85/must-gather-2mfgd" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.487594 4816 scope.go:117] "RemoveContainer" containerID="b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.532318 4816 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58505032-b628-4004-ba48-6cb633d4e3ec-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "58505032-b628-4004-ba48-6cb633d4e3ec" (UID: "58505032-b628-4004-ba48-6cb633d4e3ec"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.588958 4816 scope.go:117] "RemoveContainer" containerID="f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f" Feb 16 15:47:08 crc kubenswrapper[4816]: E0216 15:47:08.589475 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f\": container with ID starting with f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f not found: ID does not exist" containerID="f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.589513 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f"} err="failed to get container status \"f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f\": rpc error: code = NotFound desc = could not find container \"f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f\": container with ID starting with f0f773c534ef40352347ae847185731571abc44b510b0686c721395ceb515d2f not found: ID does not exist" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.589535 4816 scope.go:117] "RemoveContainer" containerID="b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1" Feb 16 15:47:08 crc kubenswrapper[4816]: E0216 15:47:08.589776 4816 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1\": container with ID starting with b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1 not found: ID does not exist" containerID="b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.589801 4816 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1"} err="failed to get container status \"b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1\": rpc error: code = NotFound desc = could not find container \"b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1\": container with ID starting with b9a1b5ad8de0bd590ca98493d5fd21bc7cfdc2a756e940f0a1bb7bc7958cb2e1 not found: ID does not exist" Feb 16 15:47:08 crc kubenswrapper[4816]: I0216 15:47:08.633185 4816 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/58505032-b628-4004-ba48-6cb633d4e3ec-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 16 15:47:09 crc kubenswrapper[4816]: I0216 15:47:09.398850 4816 scope.go:117] 
"RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:47:09 crc kubenswrapper[4816]: E0216 15:47:09.399139 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:47:09 crc kubenswrapper[4816]: I0216 15:47:09.411637 4816 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58505032-b628-4004-ba48-6cb633d4e3ec" path="/var/lib/kubelet/pods/58505032-b628-4004-ba48-6cb633d4e3ec/volumes" Feb 16 15:47:23 crc kubenswrapper[4816]: I0216 15:47:23.399154 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:47:23 crc kubenswrapper[4816]: E0216 15:47:23.400114 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:47:36 crc kubenswrapper[4816]: I0216 15:47:36.399387 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:47:36 crc kubenswrapper[4816]: E0216 15:47:36.400188 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:47:50 crc kubenswrapper[4816]: I0216 15:47:50.398745 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:47:50 crc kubenswrapper[4816]: E0216 15:47:50.399870 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:48:02 crc kubenswrapper[4816]: I0216 15:48:02.399263 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca" Feb 16 15:48:02 crc kubenswrapper[4816]: E0216 15:48:02.400025 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc" Feb 16 15:48:17 crc 
Feb 16 15:48:17 crc kubenswrapper[4816]: I0216 15:48:17.399848 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca"
Feb 16 15:48:17 crc kubenswrapper[4816]: E0216 15:48:17.401535 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:48:31 crc kubenswrapper[4816]: I0216 15:48:31.408474 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca"
Feb 16 15:48:31 crc kubenswrapper[4816]: E0216 15:48:31.409646 4816 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f95nc_openshift-machine-config-operator(eb19d695-8c09-42cc-bc34-940019ab38dc)\"" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" podUID="eb19d695-8c09-42cc-bc34-940019ab38dc"
Feb 16 15:48:45 crc kubenswrapper[4816]: I0216 15:48:45.398893 4816 scope.go:117] "RemoveContainer" containerID="fcc55658eb731847c6591a12af1aad3fb5eb14128e742b4fa6995c6913f8d4ca"
Feb 16 15:48:46 crc kubenswrapper[4816]: I0216 15:48:46.597557 4816 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f95nc" event={"ID":"eb19d695-8c09-42cc-bc34-940019ab38dc","Type":"ContainerStarted","Data":"907e6ebe1fc7c68fbd360b639a65f151d9b75b026139f24adf375197cdc8f266"}